| code (stringlengths 3-1.05M) | repo_name (stringlengths 4-116) | path (stringlengths 3-942) | language (stringclasses, 30 values) | license (stringclasses, 15 values) | size (int32, 3-1.05M) |
|---|---|---|---|---|---|
<?php
namespace Jazzee\Element;
/**
* Phonenumber Element
*
* @author Jon Johnson <[email protected]>
* @license http://jazzee.org/license BSD-3-Clause
*/
class Phonenumber extends TextInput
{
const PAGEBUILDER_SCRIPT = 'resource/scripts/element_types/JazzeeElementPhonenumber.js';
public function addToField(\Foundation\Form\Field $field)
{
$element = $field->newElement('TextInput', 'el' . $this->_element->getId());
$element->setLabel($this->_element->getTitle());
$element->setInstructions($this->_element->getInstructions());
$element->setFormat($this->_element->getFormat());
$element->setDefaultValue($this->_element->getDefaultValue());
if ($this->_element->isRequired()) {
$validator = new \Foundation\Form\Validator\NotEmpty($element);
$element->addValidator($validator);
}
$validator = new \Foundation\Form\Validator\Phonenumber($element);
$element->addValidator($validator);
$filter = new \Foundation\Form\Filter\Phonenumber($element);
$element->addFilter($filter);
return $element;
}
}
| Jazzee/Jazzee | src/Jazzee/Element/Phonenumber.php | PHP | bsd-3-clause | 1,083 |
# for
for i in 1..10 do
puts i
end
| TJ-Hidetaka-Takano/mrubyc | sample_ruby/basic_sample05.rb | Ruby | bsd-3-clause | 39 |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/media/webrtc/permission_bubble_media_access_handler.h"
#include <memory>
#include <utility>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/metrics/field_trial.h"
#include "base/task/post_task.h"
#include "build/build_config.h"
#include "chrome/browser/media/webrtc/media_capture_devices_dispatcher.h"
#include "chrome/browser/media/webrtc/media_stream_capture_indicator.h"
#include "chrome/browser/media/webrtc/media_stream_device_permissions.h"
#include "chrome/browser/permissions/permission_manager_factory.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/common/pref_names.h"
#include "components/content_settings/browser/tab_specific_content_settings.h"
#include "components/content_settings/core/browser/host_content_settings_map.h"
#include "components/permissions/permission_manager.h"
#include "components/permissions/permission_result.h"
#include "components/pref_registry/pref_registry_syncable.h"
#include "components/prefs/pref_service.h"
#include "components/webrtc/media_stream_devices_controller.h"
#include "content/public/browser/browser_task_traits.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/notification_types.h"
#include "content/public/browser/web_contents.h"
#if defined(OS_ANDROID)
#include <vector>
#include "chrome/browser/flags/android/chrome_feature_list.h"
#include "chrome/browser/media/webrtc/screen_capture_infobar_delegate_android.h"
#include "components/permissions/permission_uma_util.h"
#include "components/permissions/permission_util.h"
#endif // defined(OS_ANDROID)
#if defined(OS_MACOSX)
#include "base/metrics/histogram_macros.h"
#include "chrome/browser/content_settings/chrome_content_settings_utils.h"
#include "chrome/browser/media/webrtc/system_media_capture_permissions_mac.h"
#include "chrome/browser/media/webrtc/system_media_capture_permissions_stats_mac.h"
#endif
using content::BrowserThread;
using RepeatingMediaResponseCallback =
base::RepeatingCallback<void(const blink::MediaStreamDevices& devices,
blink::mojom::MediaStreamRequestResult result,
std::unique_ptr<content::MediaStreamUI> ui)>;
#if defined(OS_MACOSX)
using system_media_permissions::SystemPermission;
#endif
namespace {
void UpdateTabSpecificContentSettings(
content::WebContents* web_contents,
const content::MediaStreamRequest& request,
ContentSetting audio_setting,
ContentSetting video_setting) {
if (!web_contents)
return;
auto* content_settings =
content_settings::TabSpecificContentSettings::FromWebContents(
web_contents);
if (!content_settings)
return;
content_settings::TabSpecificContentSettings::MicrophoneCameraState
microphone_camera_state = content_settings::TabSpecificContentSettings::
MICROPHONE_CAMERA_NOT_ACCESSED;
std::string selected_audio_device;
std::string selected_video_device;
std::string requested_audio_device = request.requested_audio_device_id;
std::string requested_video_device = request.requested_video_device_id;
// TODO(raymes): Why do we use the defaults here for the selected devices?
// Shouldn't we just use the devices that were actually selected?
Profile* profile =
Profile::FromBrowserContext(web_contents->GetBrowserContext());
if (audio_setting != CONTENT_SETTING_DEFAULT) {
selected_audio_device =
requested_audio_device.empty()
? profile->GetPrefs()->GetString(prefs::kDefaultAudioCaptureDevice)
: requested_audio_device;
microphone_camera_state |=
content_settings::TabSpecificContentSettings::MICROPHONE_ACCESSED |
(audio_setting == CONTENT_SETTING_ALLOW
? 0
: content_settings::TabSpecificContentSettings::
MICROPHONE_BLOCKED);
}
if (video_setting != CONTENT_SETTING_DEFAULT) {
selected_video_device =
requested_video_device.empty()
? profile->GetPrefs()->GetString(prefs::kDefaultVideoCaptureDevice)
: requested_video_device;
microphone_camera_state |=
content_settings::TabSpecificContentSettings::CAMERA_ACCESSED |
(video_setting == CONTENT_SETTING_ALLOW
? 0
: content_settings::TabSpecificContentSettings::CAMERA_BLOCKED);
}
content_settings->OnMediaStreamPermissionSet(
PermissionManagerFactory::GetForProfile(profile)->GetCanonicalOrigin(
ContentSettingsType::MEDIASTREAM_CAMERA, request.security_origin,
web_contents->GetLastCommittedURL()),
microphone_camera_state, selected_audio_device, selected_video_device,
requested_audio_device, requested_video_device);
}
} // namespace
struct PermissionBubbleMediaAccessHandler::PendingAccessRequest {
PendingAccessRequest(const content::MediaStreamRequest& request,
RepeatingMediaResponseCallback callback)
: request(request), callback(callback) {}
~PendingAccessRequest() {}
// TODO(gbillock): make the MediaStreamDevicesController owned by
// this object when we're using bubbles.
content::MediaStreamRequest request;
RepeatingMediaResponseCallback callback;
};
PermissionBubbleMediaAccessHandler::PermissionBubbleMediaAccessHandler() {
// PermissionBubbleMediaAccessHandler should be created on the UI thread.
// Otherwise, it will not receive
// content::NOTIFICATION_WEB_CONTENTS_DESTROYED, and that will result in a
// possible use-after-free.
DCHECK_CURRENTLY_ON(BrowserThread::UI);
notifications_registrar_.Add(this,
content::NOTIFICATION_WEB_CONTENTS_DESTROYED,
content::NotificationService::AllSources());
}
PermissionBubbleMediaAccessHandler::~PermissionBubbleMediaAccessHandler() {}
bool PermissionBubbleMediaAccessHandler::SupportsStreamType(
content::WebContents* web_contents,
const blink::mojom::MediaStreamType type,
const extensions::Extension* extension) {
#if defined(OS_ANDROID)
return type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE ||
type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE ||
type == blink::mojom::MediaStreamType::GUM_DESKTOP_VIDEO_CAPTURE ||
type == blink::mojom::MediaStreamType::DISPLAY_VIDEO_CAPTURE;
#else
return type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE ||
type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE;
#endif
}
bool PermissionBubbleMediaAccessHandler::CheckMediaAccessPermission(
content::RenderFrameHost* render_frame_host,
const GURL& security_origin,
blink::mojom::MediaStreamType type,
const extensions::Extension* extension) {
content::WebContents* web_contents =
content::WebContents::FromRenderFrameHost(render_frame_host);
Profile* profile =
Profile::FromBrowserContext(web_contents->GetBrowserContext());
ContentSettingsType content_settings_type =
type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE
? ContentSettingsType::MEDIASTREAM_MIC
: ContentSettingsType::MEDIASTREAM_CAMERA;
DCHECK(!security_origin.is_empty());
GURL embedding_origin = web_contents->GetLastCommittedURL().GetOrigin();
permissions::PermissionManager* permission_manager =
PermissionManagerFactory::GetForProfile(profile);
return permission_manager
->GetPermissionStatusForFrame(content_settings_type,
render_frame_host, security_origin)
.content_setting == CONTENT_SETTING_ALLOW;
}
void PermissionBubbleMediaAccessHandler::HandleRequest(
content::WebContents* web_contents,
const content::MediaStreamRequest& request,
content::MediaResponseCallback callback,
const extensions::Extension* extension) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
#if defined(OS_ANDROID)
if (blink::IsScreenCaptureMediaType(request.video_type) &&
!base::FeatureList::IsEnabled(
chrome::android::kUserMediaScreenCapturing)) {
// If screen capturing isn't enabled on Android, we'll return "invalid state"
// as the result, same as on desktop.
std::move(callback).Run(
blink::MediaStreamDevices(),
blink::mojom::MediaStreamRequestResult::INVALID_STATE, nullptr);
return;
}
#endif // defined(OS_ANDROID)
RequestsMap& requests_map = pending_requests_[web_contents];
requests_map.emplace(
next_request_id_++,
PendingAccessRequest(
request, base::AdaptCallbackForRepeating(std::move(callback))));
// If this is the only request then show the infobar.
if (requests_map.size() == 1)
ProcessQueuedAccessRequest(web_contents);
}
void PermissionBubbleMediaAccessHandler::ProcessQueuedAccessRequest(
content::WebContents* web_contents) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
auto it = pending_requests_.find(web_contents);
if (it == pending_requests_.end() || it->second.empty()) {
// Don't do anything if the tab was closed.
return;
}
DCHECK(!it->second.empty());
const int request_id = it->second.begin()->first;
const content::MediaStreamRequest& request =
it->second.begin()->second.request;
#if defined(OS_ANDROID)
if (blink::IsScreenCaptureMediaType(request.video_type)) {
ScreenCaptureInfoBarDelegateAndroid::Create(
web_contents, request,
base::BindOnce(
&PermissionBubbleMediaAccessHandler::OnAccessRequestResponse,
base::Unretained(this), web_contents, request_id));
return;
}
#endif
webrtc::MediaStreamDevicesController::RequestPermissions(
request, MediaCaptureDevicesDispatcher::GetInstance(),
base::BindOnce(
&PermissionBubbleMediaAccessHandler::OnMediaStreamRequestResponse,
base::Unretained(this), web_contents, request_id, request));
}
void PermissionBubbleMediaAccessHandler::UpdateMediaRequestState(
int render_process_id,
int render_frame_id,
int page_request_id,
blink::mojom::MediaStreamType stream_type,
content::MediaRequestState state) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
if (state != content::MEDIA_REQUEST_STATE_CLOSING)
return;
bool found = false;
for (auto requests_it = pending_requests_.begin();
requests_it != pending_requests_.end(); ++requests_it) {
RequestsMap& requests_map = requests_it->second;
for (RequestsMap::iterator it = requests_map.begin();
it != requests_map.end(); ++it) {
if (it->second.request.render_process_id == render_process_id &&
it->second.request.render_frame_id == render_frame_id &&
it->second.request.page_request_id == page_request_id) {
requests_map.erase(it);
found = true;
break;
}
}
if (found)
break;
}
}
// static
void PermissionBubbleMediaAccessHandler::RegisterProfilePrefs(
user_prefs::PrefRegistrySyncable* prefs) {
prefs->RegisterBooleanPref(prefs::kVideoCaptureAllowed, true);
prefs->RegisterBooleanPref(prefs::kAudioCaptureAllowed, true);
prefs->RegisterListPref(prefs::kVideoCaptureAllowedUrls);
prefs->RegisterListPref(prefs::kAudioCaptureAllowedUrls);
}
void PermissionBubbleMediaAccessHandler::OnMediaStreamRequestResponse(
content::WebContents* web_contents,
int request_id,
content::MediaStreamRequest request,
const blink::MediaStreamDevices& devices,
blink::mojom::MediaStreamRequestResult result,
bool blocked_by_feature_policy,
ContentSetting audio_setting,
ContentSetting video_setting) {
if (pending_requests_.find(web_contents) == pending_requests_.end()) {
// WebContents has been destroyed. Don't need to do anything.
return;
}
// If the kill switch is on, or the request was blocked because of feature
// policy, we don't update the tab context.
if (result != blink::mojom::MediaStreamRequestResult::KILL_SWITCH_ON &&
!blocked_by_feature_policy) {
UpdateTabSpecificContentSettings(web_contents, request, audio_setting,
video_setting);
}
std::unique_ptr<content::MediaStreamUI> ui;
if (!devices.empty()) {
ui = MediaCaptureDevicesDispatcher::GetInstance()
->GetMediaStreamCaptureIndicator()
->RegisterMediaStream(web_contents, devices);
}
OnAccessRequestResponse(web_contents, request_id, devices, result,
std::move(ui));
}
void PermissionBubbleMediaAccessHandler::OnAccessRequestResponse(
content::WebContents* web_contents,
int request_id,
const blink::MediaStreamDevices& devices,
blink::mojom::MediaStreamRequestResult result,
std::unique_ptr<content::MediaStreamUI> ui) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
auto request_maps_it = pending_requests_.find(web_contents);
if (request_maps_it == pending_requests_.end()) {
// WebContents has been destroyed. Don't need to do anything.
return;
}
RequestsMap& requests_map(request_maps_it->second);
if (requests_map.empty())
return;
auto request_it = requests_map.find(request_id);
DCHECK(request_it != requests_map.end());
if (request_it == requests_map.end())
return;
blink::mojom::MediaStreamRequestResult final_result = result;
#if defined(OS_MACOSX)
// If the request was approved, ask for system permissions if needed, and run
// this function again when done.
if (result == blink::mojom::MediaStreamRequestResult::OK) {
const content::MediaStreamRequest& request = request_it->second.request;
if (request.audio_type ==
blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE) {
const SystemPermission system_audio_permission =
system_media_permissions::CheckSystemAudioCapturePermission();
UMA_HISTOGRAM_ENUMERATION(
"Media.Audio.Capture.Mac.MicSystemPermission.UserMedia",
system_audio_permission);
if (system_audio_permission == SystemPermission::kNotDetermined) {
// Using WeakPtr since callback can come at any time and we might be
// destroyed.
system_media_permissions::RequestSystemAudioCapturePermisson(
base::BindOnce(
&PermissionBubbleMediaAccessHandler::OnAccessRequestResponse,
weak_factory_.GetWeakPtr(), web_contents, request_id, devices,
result, std::move(ui)),
{content::BrowserThread::UI});
return;
} else if (system_audio_permission == SystemPermission::kRestricted ||
system_audio_permission == SystemPermission::kDenied) {
content_settings::UpdateLocationBarUiForWebContents(web_contents);
final_result =
blink::mojom::MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED;
system_media_permissions::SystemAudioCapturePermissionBlocked();
} else {
DCHECK_EQ(system_audio_permission, SystemPermission::kAllowed);
content_settings::UpdateLocationBarUiForWebContents(web_contents);
}
}
if (request.video_type ==
blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE) {
const SystemPermission system_video_permission =
system_media_permissions::CheckSystemVideoCapturePermission();
UMA_HISTOGRAM_ENUMERATION(
"Media.Video.Capture.Mac.CameraSystemPermission.UserMedia",
system_video_permission);
if (system_video_permission == SystemPermission::kNotDetermined) {
// Using WeakPtr since callback can come at any time and we might be
// destroyed.
system_media_permissions::RequestSystemVideoCapturePermisson(
base::BindOnce(
&PermissionBubbleMediaAccessHandler::OnAccessRequestResponse,
weak_factory_.GetWeakPtr(), web_contents, request_id, devices,
result, std::move(ui)),
{content::BrowserThread::UI});
return;
} else if (system_video_permission == SystemPermission::kRestricted ||
system_video_permission == SystemPermission::kDenied) {
content_settings::UpdateLocationBarUiForWebContents(web_contents);
final_result =
blink::mojom::MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED;
system_media_permissions::SystemVideoCapturePermissionBlocked();
} else {
DCHECK_EQ(system_video_permission, SystemPermission::kAllowed);
content_settings::UpdateLocationBarUiForWebContents(web_contents);
}
}
}
#endif // defined(OS_MACOSX)
RepeatingMediaResponseCallback callback =
std::move(request_it->second.callback);
requests_map.erase(request_it);
if (!requests_map.empty()) {
// Post a task to process the next queued request. It has to be done
// asynchronously to make sure that the calling infobar is not destroyed until
// after this function returns.
base::PostTask(
FROM_HERE, {BrowserThread::UI},
base::BindOnce(
&PermissionBubbleMediaAccessHandler::ProcessQueuedAccessRequest,
base::Unretained(this), web_contents));
}
std::move(callback).Run(devices, final_result, std::move(ui));
}
void PermissionBubbleMediaAccessHandler::Observe(
int type,
const content::NotificationSource& source,
const content::NotificationDetails& details) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
DCHECK_EQ(content::NOTIFICATION_WEB_CONTENTS_DESTROYED, type);
pending_requests_.erase(content::Source<content::WebContents>(source).ptr());
}
| endlessm/chromium-browser | chrome/browser/media/webrtc/permission_bubble_media_access_handler.cc | C++ | bsd-3-clause | 17,709 |
# Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from corepy.spre.spe import Instruction, DispatchInstruction, Register
from spu_insts import *
__doc__="""
ISA for the Cell Broadband Engine's SPU.
"""
class lqx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':452}
cycles = (1, 6, 0)
class stqx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':324}
cycles = (1, 6, 0)
class cbx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':468}
cycles = (1, 4, 0)
class chx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':469}
cycles = (1, 4, 0)
class cwx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':470}
cycles = (1, 4, 0)
class cdx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':471}
cycles = (1, 4, 0)
class ah(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':200}
cycles = (0, 2, 0)
class a(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':192}
cycles = (0, 2, 0)
class sfh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':72}
cycles = (0, 2, 0)
class sf(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':64}
cycles = (0, 2, 0)
class addx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':832}
cycles = (0, 2, 0)
class cg(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':194}
cycles = (0, 2, 0)
class cgx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':834}
cycles = (0, 2, 0)
class sfx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':833}
cycles = (0, 2, 0)
class bg(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':66}
cycles = (0, 2, 0)
class bgx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':835}
cycles = (0, 2, 0)
class mpy(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':964}
cycles = (0, 7, 0)
class mpyu(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':972}
cycles = (0, 7, 0)
class mpyh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':965}
cycles = (0, 7, 0)
class mpys(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':967}
cycles = (0, 7, 0)
class mpyhh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':966}
cycles = (0, 7, 0)
class mpyhha(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':838}
cycles = (0, 7, 0)
class mpyhhu(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':974}
cycles = (0, 7, 0)
class mpyhhau(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':846}
cycles = (0, 7, 0)
class clz(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':677}
cycles = (0, 2, 0)
class cntb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':692}
cycles = (0, 4, 0)
class fsmb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':438}
cycles = (1, 4, 0)
class fsmh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':437}
cycles = (1, 4, 0)
class fsm(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':436}
cycles = (1, 4, 0)
class gbb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':434}
cycles = (1, 4, 0)
class gbh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':433}
cycles = (1, 4, 0)
class gb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':432}
cycles = (1, 4, 0)
class avgb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':211}
cycles = (0, 4, 0)
class absdb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':83}
cycles = (0, 4, 0)
class sumb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':595}
cycles = (0, 4, 0)
class xsbh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':694}
cycles = (0, 2, 0)
class xshw(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':686}
cycles = (0, 2, 0)
class xswd(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':678}
cycles = (0, 2, 0)
class and_(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':193}
cycles = (0, 2, 0)
class andc(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':705}
cycles = (0, 2, 0)
class or_(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':65}
cycles = (0, 2, 0)
class orc(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':713}
cycles = (0, 2, 0)
class orx(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':496}
cycles = (1, 4, 0)
class xor(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':577}
cycles = (0, 2, 0)
class nand(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':201}
cycles = (0, 2, 0)
class nor(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':73}
cycles = (0, 2, 0)
class eqv(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':585}
cycles = (0, 2, 0)
class shlh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':95}
cycles = (0, 4, 0)
class shl(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':91}
cycles = (0, 4, 0)
class shlqbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':475}
cycles = (1, 4, 0)
class shlqby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':479}
cycles = (1, 4, 0)
class shlqbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':463}
cycles = (1, 4, 0)
class roth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':92}
cycles = (0, 4, 0)
class rot(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':88}
cycles = (0, 4, 0)
class rotqby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':476}
cycles = (1, 4, 0)
class rotqbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':460}
cycles = (1, 4, 0)
class rotqbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':472}
cycles = (1, 4, 0)
class rothm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':93}
cycles = (0, 4, 0)
class rotm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':89}
cycles = (0, 4, 0)
class rotqmby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':477}
cycles = (1, 4, 0)
class rotqmbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':461}
cycles = (1, 4, 0)
class rotqmbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':473}
cycles = (1, 4, 0)
class rotmah(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':94}
cycles = (0, 4, 0)
class rotma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':90}
cycles = (0, 4, 0)
class heq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':984}
cycles = (0, 2, 0)
class hgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':600}
cycles = (0, 2, 0)
class hlgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':728}
cycles = (0, 2, 0)
class ceqb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':976}
cycles = (0, 2, 0)
class ceqh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':968}
cycles = (0, 2, 0)
class ceq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':960}
cycles = (0, 2, 0)
class cgtb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':592}
cycles = (0, 2, 0)
class cgth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':584}
cycles = (0, 2, 0)
class cgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':576}
cycles = (0, 2, 0)
class clgtb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':720}
cycles = (0, 2, 0)
class clgth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':712}
cycles = (0, 2, 0)
class clgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':704}
cycles = (0, 2, 0)
class bi(Instruction):
machine_inst = OPCD_A_D_E
params = {'OPCD':424}
cycles = (1, 4, 0)
class iret(Instruction):
machine_inst = OPCD_A_D_E
params = {'OPCD':426}
cycles = (1, 4, 0)
class bisled(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':427}
cycles = (1, 4, 0)
class bisl(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':425}
cycles = (1, 4, 0)
class biz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':296}
cycles = (1, 4, 0)
class binz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':297}
cycles = (1, 4, 0)
class bihz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':294}
cycles = (1, 4, 0)
class bihnz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':299}
cycles = (1, 4, 0)
# TODO - can we check that if P is set then RO is zero as required?
class hbr(DispatchInstruction):
cycles = (1, 15, 0)
dispatch = (
(OPCD_RO_A_P, {'OPCD':428}),
(OPCD_LBL9_A_P, {'OPCD':428}))
class fa(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':708}
cycles = (0, 6, 0)
class dfa(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':716}
cycles = (0, 13, 6)
class fs(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':709}
cycles = (0, 6, 0)
class dfs(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':717}
cycles = (0, 13, 6)
class fm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':710}
cycles = (0, 6, 0)
class dfm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':718}
cycles = (0, 13, 6)
class dfma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':860}
cycles = (0, 13, 6)
class dfnms(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':862}
cycles = (0, 13, 6)
class dfms(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':861}
cycles = (0, 13, 6)
class dfnma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':863}
cycles = (0, 13, 6)
class frest(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':440}
cycles = (1, 4, 0)
class frsqest(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':441}
cycles = (1, 4, 0)
class fi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':980}
cycles = (0, 7, 0)
class frds(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':953}
cycles = (0, 13, 6)
class fesd(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':952}
cycles = (0, 13, 6)
class fceq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':962}
cycles = (0, 2, 0)
class fcmeq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':970}
cycles = (0, 2, 0)
class fcgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':706}
cycles = (0, 2, 0)
class fcmgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':714}
cycles = (0, 2, 0)
class fscrwr(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':954}
cycles = (0, 7, 0)
class fscrrd(Instruction):
machine_inst = OPCD_T
params = {'OPCD':920}
cycles = (0, 13, 6)
class stop(Instruction):
machine_inst = OPCD_STOP_SIG
params = {'OPCD':0}
cycles = (1, 4, 0)
class stopd(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':320}
cycles = (1, 4, 0)
class lnop(Instruction):
machine_inst = OPCD
params = {'OPCD':1}
cycles = (1, 0, 0)
class nop(Instruction):
machine_inst = OPCD_T
params = {'OPCD':513}
cycles = (0, 0, 0)
class sync(Instruction):
machine_inst = OPCD_CF
params = {'OPCD':2}
cycles = (1, 4, 0)
class dsync(Instruction):
machine_inst = OPCD
params = {'OPCD':3}
cycles = (1, 4, 0)
class mfspr(Instruction):
machine_inst = OPCD_SA_T
params = {'OPCD':12}
cycles = (1, 6, 0)
class mtspr(Instruction):
machine_inst = OPCD_SA_T
params = {'OPCD':268}
cycles = (1, 6, 0)
class rdch(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':13}
cycles = (1, 6, 0)
class rchcnt(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':15}
cycles = (1, 6, 0)
class wrch(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':269}
cycles = (1, 6, 0)
class mpya(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':12}
cycles = (0, 7, 0)
class selb(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':8}
cycles = (0, 2, 0)
class shufb(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':11}
cycles = (1, 4, 0)
class fma(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':14}
cycles = (0, 6, 0)
class fnms(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':13}
cycles = (0, 6, 0)
class fms(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':15}
cycles = (0, 6, 0)
class cbd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':500}
cycles = (1, 4, 0)
class chd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':501}
cycles = (1, 4, 0)
class cwd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':502}
cycles = (1, 4, 0)
class cdd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':503}
cycles = (1, 4, 0)
class shlhi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':127}
cycles = (0, 4, 0)
class shli(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':123}
cycles = (0, 4, 0)
class shlqbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':507}
cycles = (1, 4, 0)
class shlqbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':511}
cycles = (1, 4, 0)
class rothi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':124}
cycles = (0, 4, 0)
class roti(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':120}
cycles = (0, 4, 0)
class rotqbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':508}
cycles = (1, 4, 0)
class rotqbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':504}
cycles = (1, 4, 0)
class rothmi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':125}
cycles = (0, 4, 0)
class rotmi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':121}
cycles = (0, 4, 0)
class rotqmbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':509}
cycles = (1, 4, 0)
class rotqmbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':505}
cycles = (1, 4, 0)
class rotmahi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':126}
cycles = (0, 4, 0)
class rotmai(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':122}
cycles = (0, 4, 0)
class csflt(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':474}
cycles = (0, 7, 0)
class cflts(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':472}
cycles = (0, 7, 0)
class cuflt(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':475}
cycles = (0, 7, 0)
class cfltu(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':473}
cycles = (0, 7, 0)
class lqd(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':52}
cycles = (1, 6, 0)
class stqd(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':36}
cycles = (1, 6, 0)
class ahi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':29}
cycles = (0, 2, 0)
class ai(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':28}
cycles = (0, 2, 0)
class sfhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':13}
cycles = (0, 2, 0)
class sfi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':12}
cycles = (0, 2, 0)
class mpyi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':116}
cycles = (0, 7, 0)
class mpyui(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':117}
cycles = (0, 7, 0)
class andbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':22}
cycles = (0, 2, 0)
class andhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':21}
cycles = (0, 2, 0)
class andi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':20}
cycles = (0, 2, 0)
class orbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':6}
cycles = (0, 2, 0)
class orhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':5}
cycles = (0, 2, 0)
class ori(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':4}
cycles = (0, 2, 0)
class xorbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':70}
cycles = (0, 2, 0)
class xorhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':69}
cycles = (0, 2, 0)
class xori(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':68}
cycles = (0, 2, 0)
class heqi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':127}
cycles = (0, 2, 0)
class hgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':79}
cycles = (0, 2, 0)
class hlgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':95}
cycles = (0, 2, 0)
class ceqbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':126}
cycles = (0, 2, 0)
class ceqhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':125}
cycles = (0, 2, 0)
class ceqi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':124}
cycles = (0, 2, 0)
class cgtbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':78}
cycles = (0, 2, 0)
class cgthi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':77}
cycles = (0, 2, 0)
class cgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':76}
cycles = (0, 2, 0)
class clgtbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':94}
cycles = (0, 2, 0)
class clgthi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':93}
cycles = (0, 2, 0)
class clgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':92}
cycles = (0, 2, 0)
class lqa(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':97}
cycles = (1, 6, 0)
class lqr(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':103}
cycles = (1, 6, 0)
class stqa(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':65}
cycles = (1, 6, 0)
class stqr(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':71}
cycles = (1, 6, 0)
class ilh(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':131}
cycles = (0, 2, 0)
class ilhu(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':130}
cycles = (0, 2, 0)
class il(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':129}
cycles = (0, 2, 0)
class iohl(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':193}
cycles = (0, 2, 0)
class fsmbi(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':101}
cycles = (1, 4, 0)
class br(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':100}),
(OPCD_LBL16, {'OPCD':100}))
# TODO - how can I do absolute branches?
class bra(Instruction):
machine_inst = OPCD_I16
params = {'OPCD':96}
cycles = (1, 4, 0)
# TODO - I16 has two zero bits appended, do I handle this correctly?
# What is the correct way, anyway?
class brsl(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':102}),
(OPCD_LBL16_T, {'OPCD':102}))
class brasl(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':98}
cycles = (1, 4, 0)
class brnz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':66}),
(OPCD_LBL16_T, {'OPCD':66}))
class brz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':64}),
(OPCD_LBL16_T, {'OPCD':64}))
class brhnz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':70}),
(OPCD_LBL16, {'OPCD':70}))
class brhz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':68}),
(OPCD_LBL16, {'OPCD':68}))
class hbra(Instruction):
machine_inst = OPCD_LBL9_I16
params = {'OPCD':8}
cycles = (1, 15, 0)
class hbrr(DispatchInstruction):
cycles = (1, 15, 0)
dispatch = (
(OPCD_ROA_I16, {'OPCD':9}),
(OPCD_LBL9_LBL16, {'OPCD':9}))
class ila(Instruction):
machine_inst = OPCD_I18_T
params = {'OPCD':33}
cycles = (0, 2, 0)
| matthiaskramm/corepy | corepy/arch/spu/isa/spu_isa.py | Python | bsd-3-clause | 22,294 |
/*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.patientdiscovery.inbound.deferred.request;
import gov.hhs.fha.nhinc.aspect.InboundProcessingEvent;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants;
import gov.hhs.fha.nhinc.patientdiscovery.PatientDiscoveryAuditor;
import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.request.proxy.AdapterPatientDiscoveryDeferredReqProxy;
import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.request.proxy.AdapterPatientDiscoveryDeferredReqProxyObjectFactory;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.MCCIIN000002UV01EventDescriptionBuilder;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.PRPAIN201305UV02EventDescriptionBuilder;
import org.hl7.v3.MCCIIN000002UV01;
import org.hl7.v3.PRPAIN201305UV02;
public abstract class AbstractInboundPatientDiscoveryDeferredRequest implements InboundPatientDiscoveryDeferredRequest {
private final AdapterPatientDiscoveryDeferredReqProxyObjectFactory adapterFactory;
public AbstractInboundPatientDiscoveryDeferredRequest(AdapterPatientDiscoveryDeferredReqProxyObjectFactory factory) {
adapterFactory = factory;
}
abstract MCCIIN000002UV01 process(PRPAIN201305UV02 request, AssertionType assertion);
abstract PatientDiscoveryAuditor getAuditLogger();
/**
* Processes the inbound PD Deferred request message. This call audits the request received from the Nhin, processes it, and audits the acknowledgement returned to the Nhin.
*
* @param request
* @param assertion
* @return MCCIIN000002UV01
*/
@InboundProcessingEvent(beforeBuilder = PRPAIN201305UV02EventDescriptionBuilder.class,
afterReturningBuilder = MCCIIN000002UV01EventDescriptionBuilder.class,
serviceType = "Patient Discovery Deferred Request",
version = "1.0")
public MCCIIN000002UV01 respondingGatewayPRPAIN201305UV02(PRPAIN201305UV02 request, AssertionType assertion) {
auditRequestFromNhin(request, assertion);
MCCIIN000002UV01 response = process(request, assertion);
auditResponseToNhin(response, assertion);
return response;
}
protected MCCIIN000002UV01 sendToAdapter(PRPAIN201305UV02 request, AssertionType assertion) {
AdapterPatientDiscoveryDeferredReqProxy proxy = adapterFactory.getAdapterPatientDiscoveryDeferredReqProxy();
return proxy.processPatientDiscoveryAsyncReq(request, assertion);
}
private void auditRequestFromNhin(PRPAIN201305UV02 request, AssertionType assertion) {
getAuditLogger().auditNhinDeferred201305(request, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION);
}
private void auditResponseToNhin(MCCIIN000002UV01 response, AssertionType assertion) {
getAuditLogger().auditAck(response, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION,
NhincConstants.AUDIT_LOG_NHIN_INTERFACE);
}
protected void auditRequestToAdapter(PRPAIN201305UV02 request, AssertionType assertion) {
getAuditLogger().auditAdapterDeferred201305(request, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION);
}
protected void auditResponseFromAdapter(MCCIIN000002UV01 response, AssertionType assertion) {
getAuditLogger().auditAck(response, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION,
NhincConstants.AUDIT_LOG_ADAPTER_INTERFACE);
}
}
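// Illustrative sketch only (not part of the original CONNECT source): a
// hypothetical concrete subclass showing how the two abstract hooks above are
// typically wired together. The class name, the injected auditor, and the
// choice to audit around a direct adapter call are assumptions based on the
// protected helpers defined in the abstract class.
class ExampleInboundPatientDiscoveryDeferredRequest
        extends AbstractInboundPatientDiscoveryDeferredRequest {

    private final PatientDiscoveryAuditor auditLogger;

    ExampleInboundPatientDiscoveryDeferredRequest(
            AdapterPatientDiscoveryDeferredReqProxyObjectFactory factory,
            PatientDiscoveryAuditor auditLogger) {
        super(factory);
        this.auditLogger = auditLogger;
    }

    @Override
    MCCIIN000002UV01 process(PRPAIN201305UV02 request, AssertionType assertion) {
        // Audit the hand-off to the adapter, delegate, then audit the returned ack.
        auditRequestToAdapter(request, assertion);
        MCCIIN000002UV01 ack = sendToAdapter(request, assertion);
        auditResponseFromAdapter(ack, assertion);
        return ack;
    }

    @Override
    PatientDiscoveryAuditor getAuditLogger() {
        return auditLogger;
    }
}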
| sailajaa/CONNECT | Product/Production/Services/PatientDiscoveryCore/src/main/java/gov/hhs/fha/nhinc/patientdiscovery/inbound/deferred/request/AbstractInboundPatientDiscoveryDeferredRequest.java | Java | bsd-3-clause | 5,048 |
/*
-- MAGMA (version 2.1.0) --
Univ. of Tennessee, Knoxville
Univ. of California, Berkeley
Univ. of Colorado, Denver
@date August 2016
@author Stan Tomov
@author Hartwig Anzt
@precisions normal z -> s d c
*/
#include "magmasparse_internal.h"
#define PRECISION_z
#define COMPLEX
#define RTOLERANCE lapackf77_dlamch( "E" )
#define ATOLERANCE lapackf77_dlamch( "E" )
/**
Purpose
-------
Solves an eigenvalue problem
A * X = evalues X
where A is a complex sparse matrix stored in the GPU memory.
X is a block of complex vectors stored in GPU memory.
This is a GPU implementation of the LOBPCG method.
This method allocates all required memory space inside the routine.
Also, the memory is not allocated as one big chunk, but separately for
the different blocks. This allows texture memory to be used even for large matrices.
Arguments
---------
@param[in]
A magma_z_matrix
input matrix A
@param[in,out]
solver_par magma_z_solver_par*
solver parameters
@param[in,out]
precond_par magma_z_precond_par*
preconditioner parameters
@param[in]
queue magma_queue_t
Queue to execute in.
@ingroup magmasparse_zheev
********************************************************************/
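/*
   Outline of the iteration as implemented below (added summary, not part of
   the original MAGMA source; see the code for the authoritative details):

     1. Orthonormalize the initial block X and form AX = A*X.
     2. Rayleigh-Ritz on X: eigen-decompose the Gram matrix (X, AX) and
        rotate X and AX by its eigenvectors.
     3. In each iteration:
          - form the residuals R = AX - X*diag(evalues),
          - compact away columns whose residual norm is below the tolerance,
          - apply the preconditioner to the active residuals and
            re-orthonormalize them,
          - build the Gram matrices for the subspace [X R P] and solve the
            generalized eigenproblem GramA*y = lambda*GramB*y on the CPU,
          - update X, AX and the search directions P, AP from the Ritz
            vectors; restart (drop P) if the Gram basis becomes
            ill-conditioned.
*/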
extern "C" magma_int_t
magma_zlobpcg(
magma_z_matrix A,
magma_z_solver_par *solver_par,
magma_z_preconditioner *precond_par,
magma_queue_t queue )
{
magma_int_t info = 0;
#define residualNorms(i,iter) ( residualNorms + (i) + (iter)*n )
#define SWAP(x, y) { pointer = x; x = y; y = pointer; }
#define hresidualNorms(i,iter) (hresidualNorms + (i) + (iter)*n )
#define gramA( m, n) (gramA + (m) + (n)*ldgram)
#define gramB( m, n) (gramB + (m) + (n)*ldgram)
#define gevectors(m, n) (gevectors + (m) + (n)*ldgram)
#define h_gramB( m, n) (h_gramB + (m) + (n)*ldgram)
#define magma_z_bspmv_tuned(m, n, alpha, A, X, beta, AX, queue) { \
magma_z_matrix x={Magma_CSR}, ax={Magma_CSR}; \
x.memory_location = Magma_DEV; x.num_rows = m; x.num_cols = n; x.major = MagmaColMajor; x.nnz = m*n; x.dval = X; x.storage_type = Magma_DENSE; \
ax.memory_location= Magma_DEV; ax.num_rows = m; ax.num_cols = n; ax.major = MagmaColMajor; ax.nnz = m*n; ax.dval = AX; ax.storage_type = Magma_DENSE; \
CHECK( magma_z_spmv(alpha, A, x, beta, ax, queue )); \
}
//**************************************************************
// %Memory allocation for the eigenvectors, eigenvalues, and workspace
solver_par->solver = Magma_LOBPCG;
magma_int_t m = A.num_rows;
magma_int_t n = (solver_par->num_eigenvalues);
magmaDoubleComplex *blockX = solver_par->eigenvectors;
double *evalues = solver_par->eigenvalues;
solver_par->numiter = 0;
solver_par->spmv_count = 0;
magmaDoubleComplex *dwork=NULL, *hwork=NULL;
magmaDoubleComplex *blockP=NULL, *blockAP=NULL, *blockR=NULL, *blockAR=NULL, *blockAX=NULL, *blockW=NULL;
magmaDoubleComplex *gramA=NULL, *gramB=NULL, *gramM=NULL;
magmaDoubleComplex *gevectors=NULL, *h_gramB=NULL;
dwork = NULL;
hwork = NULL;
blockP = NULL;
blockR = NULL;
blockAP = NULL;
blockAR = NULL;
blockAX = NULL;
blockW = NULL;
gramA = NULL;
gramB = NULL;
gramM = NULL;
gevectors = NULL;
h_gramB = NULL;
magmaDoubleComplex *pointer, *origX = blockX;
double *eval_gpu=NULL;
magma_int_t iterationNumber, cBlockSize, restart = 1, iter;
//Chronometry
real_Double_t tempo1, tempo2;
magma_int_t lwork = max( 2*n+n*magma_get_dsytrd_nb(n),
1 + 6*3*n + 2* 3*n* 3*n);
magma_int_t *iwork={0}, liwork = 15*n+9;
magma_int_t gramDim, ldgram = 3*n, ikind = 3;
magmaDoubleComplex *hW={0};
// === Set solver parameters ===
double residualTolerance = solver_par->rtol;
magma_int_t maxIterations = solver_par->maxiter;
double tmp;
double r0=0; // set in 1st iteration
// === Set some constants & defaults ===
magmaDoubleComplex c_zero = MAGMA_Z_ZERO;
magmaDoubleComplex c_one = MAGMA_Z_ONE;
magmaDoubleComplex c_neg_one = MAGMA_Z_NEG_ONE;
double *residualNorms={0}, *condestGhistory={0}, condestG={0};
double *gevalues={0};
magma_int_t *activeMask={0};
double *hresidualNorms={0};
#ifdef COMPLEX
double *rwork={0};
magma_int_t lrwork = 1 + 5*(3*n) + 2*(3*n)*(3*n);
CHECK( magma_dmalloc_cpu(&rwork, lrwork));
#endif
CHECK( magma_zmalloc_pinned( &hwork , lwork ));
CHECK( magma_zmalloc( &blockAX , m*n ));
CHECK( magma_zmalloc( &blockAR , m*n ));
CHECK( magma_zmalloc( &blockAP , m*n ));
CHECK( magma_zmalloc( &blockR , m*n ));
CHECK( magma_zmalloc( &blockP , m*n ));
CHECK( magma_zmalloc( &blockW , m*n ));
CHECK( magma_zmalloc( &dwork , m*n ));
CHECK( magma_dmalloc( &eval_gpu , 3*n ));
//**********************************************************+
// === Check some parameters for possible quick exit ===
solver_par->info = MAGMA_SUCCESS;
if (m < 2)
info = MAGMA_DIVERGENCE;
else if (n > m)
info = MAGMA_SLOW_CONVERGENCE;
if (solver_par->info != 0) {
magma_xerbla( __func__, -(info) );
goto cleanup;
}
solver_par->info = info; // local info variable;
// === Allocate GPU memory for the residual norms' history ===
CHECK( magma_dmalloc(&residualNorms, (maxIterations+1) * n));
CHECK( magma_malloc( (void **)&activeMask, (n+1) * sizeof(magma_int_t) ));
// === Allocate CPU work space ===
CHECK( magma_dmalloc_cpu(&condestGhistory, maxIterations+1));
CHECK( magma_dmalloc_cpu(&gevalues, 3 * n));
CHECK( magma_malloc_cpu((void **)&iwork, liwork * sizeof(magma_int_t)));
CHECK( magma_zmalloc_pinned(&hW, n*n));
CHECK( magma_zmalloc_pinned(&gevectors, 9*n*n));
CHECK( magma_zmalloc_pinned(&h_gramB , 9*n*n));
// === Allocate GPU workspace ===
CHECK( magma_zmalloc(&gramM, n * n));
CHECK( magma_zmalloc(&gramA, 9 * n * n));
CHECK( magma_zmalloc(&gramB, 9 * n * n));
// === Set activemask to one ===
for(magma_int_t k =0; k<n; k++){
iwork[k]=1;
}
magma_setmatrix(n, 1, sizeof(magma_int_t), iwork, n , activeMask, n, queue);
#if defined(PRECISION_s)
ikind = 3;
#endif
// === Make the initial vectors orthonormal ===
magma_zgegqr_gpu(ikind, m, n, blockX, m, dwork, hwork, &info );
//magma_zorthomgs( m, n, blockX, queue );
magma_z_bspmv_tuned(m, n, c_one, A, blockX, c_zero, blockAX, queue );
solver_par->spmv_count++;
// === Compute the Gram matrix = (X, AX) & its eigenstates ===
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m,
c_one, blockX, m, blockAX, m, c_zero, gramM, n, queue );
magma_zheevd_gpu( MagmaVec, MagmaUpper,
n, gramM, n, evalues, hW, n, hwork, lwork,
#ifdef COMPLEX
rwork, lrwork,
#endif
iwork, liwork, &info );
// === Update X = X * evectors ===
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, blockX, m, gramM, n, c_zero, blockW, m, queue );
SWAP(blockW, blockX);
// === Update AX = AX * evectors ===
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, blockAX, m, gramM, n, c_zero, blockW, m, queue );
SWAP(blockW, blockAX);
condestGhistory[1] = 7.82;
tempo1 = magma_sync_wtime( queue );
// === Main LOBPCG loop ============================================================
for(iterationNumber = 1; iterationNumber < maxIterations; iterationNumber++)
{
// === compute the residuals (R = Ax - x evalues )
magmablas_zlacpy( MagmaFull, m, n, blockAX, m, blockR, m, queue );
/*
for(magma_int_t i=0; i<n; i++) {
magma_zaxpy( m, MAGMA_Z_MAKE(-evalues[i],0), blockX+i*m, 1, blockR+i*m, 1, queue );
}
*/
magma_dsetmatrix( 3*n, 1, evalues, 3*n, eval_gpu, 3*n, queue );
CHECK( magma_zlobpcg_res( m, n, eval_gpu, blockX, blockR, eval_gpu, queue ));
magmablas_dznrm2_cols( m, n, blockR, m, residualNorms(0, iterationNumber), queue );
// === remove the residuals corresponding to already converged evectors
CHECK( magma_zcompact(m, n, blockR, m,
residualNorms(0, iterationNumber), residualTolerance,
activeMask, &cBlockSize, queue ));
if (cBlockSize == 0)
break;
// === apply a preconditioner P to the active residuals: R_new = P R_old
// === for now set P to be identity (no preconditioner => nothing to be done )
//magmablas_zlacpy( MagmaFull, m, cBlockSize, blockR, m, blockW, m, queue );
//SWAP(blockW, blockR);
// preconditioner
magma_z_matrix bWv={Magma_CSR}, bRv={Magma_CSR};
bWv.memory_location = Magma_DEV; bWv.num_rows = m; bWv.num_cols = cBlockSize; bWv.major = MagmaColMajor; bWv.nnz = m*cBlockSize; bWv.dval = blockW;
bRv.memory_location = Magma_DEV; bRv.num_rows = m; bRv.num_cols = cBlockSize; bRv.major = MagmaColMajor; bRv.nnz = m*cBlockSize; bRv.dval = blockR;
CHECK( magma_z_applyprecond_left( MagmaNoTrans, A, bRv, &bWv, precond_par, queue ));
CHECK( magma_z_applyprecond_right( MagmaNoTrans, A, bWv, &bRv, precond_par, queue ));
// === make the preconditioned residuals orthogonal to X
if( precond_par->solver != Magma_NONE){
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, cBlockSize, m,
c_one, blockX, m, blockR, m, c_zero, gramB(0,0), ldgram, queue );
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, n,
c_neg_one, blockX, m, gramB(0,0), ldgram, c_one, blockR, m, queue );
}
// === make the active preconditioned residuals orthonormal
magma_zgegqr_gpu(ikind, m, cBlockSize, blockR, m, dwork, hwork, &info );
#if defined(PRECISION_s)
// re-orthogonalization
SWAP(blockX, dwork);
magma_zgegqr_gpu(ikind, m, cBlockSize, blockR, m, dwork, hwork, &info );
#endif
//magma_zorthomgs( m, cBlockSize, blockR, queue );
// === compute AR
magma_z_bspmv_tuned(m, cBlockSize, c_one, A, blockR, c_zero, blockAR, queue );
solver_par->spmv_count++;
if (!restart) {
// === compact P & AP as well
CHECK( magma_zcompactActive(m, n, blockP, m, activeMask, queue ));
CHECK( magma_zcompactActive(m, n, blockAP, m, activeMask, queue ));
/*
// === make P orthogonal to X ?
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, cBlockSize, m,
c_one, blockX, m, blockP, m, c_zero, gramB(0,0), ldgram, queue );
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, n,
c_neg_one, blockX, m, gramB(0,0), ldgram, c_one, blockP, m, queue );
// === make P orthogonal to R ?
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockR, m, blockP, m, c_zero, gramB(0,0), ldgram, queue );
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, cBlockSize,
c_neg_one, blockR, m, gramB(0,0), ldgram, c_one, blockP, m, queue );
*/
// === Make P orthonormal & properly change AP (without multiplication by A)
magma_zgegqr_gpu(ikind, m, cBlockSize, blockP, m, dwork, hwork, &info );
#if defined(PRECISION_s)
// re-orthogonalization
SWAP(blockX, dwork);
magma_zgegqr_gpu(ikind, m, cBlockSize, blockP, m, dwork, hwork, &info );
#endif
//magma_zorthomgs( m, cBlockSize, blockP, queue );
//magma_z_bspmv_tuned(m, cBlockSize, c_one, A, blockP, c_zero, blockAP, queue );
magma_zsetmatrix( cBlockSize, cBlockSize, hwork, cBlockSize, dwork, cBlockSize, queue );
// replacement according to Stan
#if defined(PRECISION_s) || defined(PRECISION_d)
magmablas_ztrsm( MagmaRight, MagmaUpper, MagmaNoTrans, MagmaNonUnit,
m, cBlockSize, c_one, dwork, cBlockSize, blockAP, m, queue );
#else
magma_ztrsm( MagmaRight, MagmaUpper, MagmaNoTrans, MagmaNonUnit,
m, cBlockSize, c_one, dwork, cBlockSize, blockAP, m, queue );
#endif
}
iter = max( 1, iterationNumber - 10 - int(log(1.*cBlockSize)) );
double condestGmean = 0.;
for(magma_int_t i = 0; i<iterationNumber-iter+1; i++){
condestGmean += condestGhistory[i];
}
condestGmean = condestGmean / (iterationNumber-iter+1);
if (restart)
gramDim = n+cBlockSize;
else
gramDim = n+2*cBlockSize;
/* --- The Rayleigh-Ritz method for [X R P] -----------------------
[ X R P ]' [AX AR AP] y = evalues [ X R P ]' [ X R P ], i.e.,
GramA GramB
/ X'AX X'AR X'AP \ / X'X X'R X'P \
| R'AX R'AR R'AP | y = evalues | R'X R'R R'P |
\ P'AX P'AR P'AP / \ P'X P'R P'P /
----------------------------------------------------------------- */
// === assemble GramB; first, set it to I
magmablas_zlaset( MagmaFull, ldgram, ldgram, c_zero, c_one, gramB, ldgram, queue ); // identity
if (!restart) {
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m,
c_one, blockP, m, blockX, m, c_zero, gramB(n+cBlockSize,0), ldgram, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockP, m, blockR, m, c_zero, gramB(n+cBlockSize,n), ldgram, queue );
}
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m,
c_one, blockR, m, blockX, m, c_zero, gramB(n,0), ldgram, queue );
// === get GramB from the GPU to the CPU and compute its eigenvalues only
magma_zgetmatrix( gramDim, gramDim, gramB, ldgram, h_gramB, ldgram, queue );
lapackf77_zheev("N", "L", &gramDim, h_gramB, &ldgram, gevalues,
hwork, &lwork,
#ifdef COMPLEX
rwork,
#endif
&info);
// === check stability criteria if we need to restart
condestG = log10( gevalues[gramDim-1]/gevalues[0] ) + 1.;
if ((condestG/condestGmean>2 && condestG>2) || condestG>8) {
// Steepest descent restart for stability
restart=1;
printf("restart at step #%d\n", int(iterationNumber));
}
// === assemble GramA; first, set it to I
magmablas_zlaset( MagmaFull, ldgram, ldgram, c_zero, c_one, gramA, ldgram, queue ); // identity
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m,
c_one, blockR, m, blockAX, m, c_zero, gramA(n,0), ldgram, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockR, m, blockAR, m, c_zero, gramA(n,n), ldgram, queue );
if (!restart) {
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m,
c_one, blockP, m, blockAX, m, c_zero,
gramA(n+cBlockSize,0), ldgram, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockP, m, blockAR, m, c_zero,
gramA(n+cBlockSize,n), ldgram, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockP, m, blockAP, m, c_zero,
gramA(n+cBlockSize,n+cBlockSize), ldgram, queue );
}
/*
// === Compute X' AX or just use the eigenvalues below ?
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m,
c_one, blockX, m, blockAX, m, c_zero,
gramA(0,0), ldgram, queue );
*/
if (restart==0) {
magma_zgetmatrix( gramDim, gramDim, gramA, ldgram, gevectors, ldgram, queue );
}
else {
gramDim = n+cBlockSize;
magma_zgetmatrix( gramDim, gramDim, gramA, ldgram, gevectors, ldgram, queue );
}
for(magma_int_t k=0; k<n; k++)
*gevectors(k,k) = MAGMA_Z_MAKE(evalues[k], 0);
// === the previous eigensolver destroyed what is in h_gramB => must copy it again
magma_zgetmatrix( gramDim, gramDim, gramB, ldgram, h_gramB, ldgram, queue );
magma_int_t itype = 1;
lapackf77_zhegvd(&itype, "V", "L", &gramDim,
gevectors, &ldgram, h_gramB, &ldgram,
gevalues, hwork, &lwork,
#ifdef COMPLEX
rwork, &lrwork,
#endif
iwork, &liwork, &info);
for(magma_int_t k =0; k<n; k++)
evalues[k] = gevalues[k];
// === copy back the result to gramA on the GPU and use it for the updates
magma_zsetmatrix( gramDim, gramDim, gevectors, ldgram, gramA, ldgram, queue );
if (restart == 0) {
// === contribution from P to the new X (in new search direction P)
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockP, m, gramA(n+cBlockSize,0), ldgram, c_zero, dwork, m, queue );
SWAP(dwork, blockP);
// === contribution from R to the new X (in new search direction P)
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockR, m, gramA(n,0), ldgram, c_one, blockP, m, queue );
// === corresponding contribution from AP to the new AX (in AP)
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockAP, m, gramA(n+cBlockSize,0), ldgram, c_zero, dwork, m, queue );
SWAP(dwork, blockAP);
// === corresponding contribution from AR to the new AX (in AP)
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockAR, m, gramA(n,0), ldgram, c_one, blockAP, m, queue );
}
else {
// === contribution from R (only) to the new X
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockR, m, gramA(n,0), ldgram, c_zero, blockP, m, queue );
// === corresponding contribution from AR (only) to the new AX
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockAR, m, gramA(n,0), ldgram, c_zero, blockAP, m, queue );
}
// === contribution from old X to the new X + the new search direction P
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, blockX, m, gramA, ldgram, c_zero, dwork, m, queue );
SWAP(dwork, blockX);
//magma_zaxpy( m*n, c_one, blockP, 1, blockX, 1, queue );
CHECK( magma_zlobpcg_maxpy( m, n, blockP, blockX, queue ));
// === corresponding contribution from old AX to new AX + AP
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, blockAX, m, gramA, ldgram, c_zero, dwork, m, queue );
SWAP(dwork, blockAX);
//magma_zaxpy( m*n, c_one, blockAP, 1, blockAX, 1, queue );
CHECK( magma_zlobpcg_maxpy( m, n, blockAP, blockAX, queue ));
condestGhistory[iterationNumber+1]=condestG;
magma_dgetmatrix( 1, 1, residualNorms(0, iterationNumber), 1, &tmp, 1, queue );
if ( iterationNumber == 1 ) {
solver_par->init_res = tmp;
r0 = tmp * solver_par->rtol;
if ( r0 < ATOLERANCE )
r0 = ATOLERANCE;
}
solver_par->final_res = tmp;
if ( tmp < r0 ) {
break;
}
if (cBlockSize == 0) {
break;
}
if ( solver_par->verbose!=0 ) {
if ( iterationNumber%solver_par->verbose == 0 ) {
// double res;
// magma_zgetmatrix( 1, 1,
// (magmaDoubleComplex*)residualNorms(0, iterationNumber), 1,
// (magmaDoubleComplex*)&res, 1, queue );
//
// printf("Iteration %4d, CBS %4d, Residual: %10.7f\n",
// iterationNumber, cBlockSize, res);
printf("%4d-%2d ", int(iterationNumber), int(cBlockSize));
magma_dprint_gpu(1, n, residualNorms(0, iterationNumber), 1);
}
}
restart = 0;
} // === end for iterationNumber = 1,maxIterations =======================
// fill solver info
tempo2 = magma_sync_wtime( queue );
solver_par->runtime = (real_Double_t) tempo2-tempo1;
solver_par->numiter = iterationNumber;
if ( solver_par->numiter < solver_par->maxiter) {
info = MAGMA_SUCCESS;
} else if ( solver_par->init_res > solver_par->final_res )
info = MAGMA_SLOW_CONVERGENCE;
else
info = MAGMA_DIVERGENCE;
// =============================================================================
// === postprocessing;
// =============================================================================
// === compute the real AX and corresponding eigenvalues
magma_z_bspmv_tuned(m, n, c_one, A, blockX, c_zero, blockAX, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m,
c_one, blockX, m, blockAX, m, c_zero, gramM, n, queue );
magma_zheevd_gpu( MagmaVec, MagmaUpper,
n, gramM, n, gevalues, dwork, n, hwork, lwork,
#ifdef COMPLEX
rwork, lrwork,
#endif
iwork, liwork, &info );
for(magma_int_t k =0; k<n; k++)
evalues[k] = gevalues[k];
// === update X = X * evectors
SWAP(blockX, dwork);
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, dwork, m, gramM, n, c_zero, blockX, m, queue );
// === update AX = AX * evectors to compute the final residual
SWAP(blockAX, dwork);
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, dwork, m, gramM, n, c_zero, blockAX, m, queue );
// === compute R = AX - evalues X
magmablas_zlacpy( MagmaFull, m, n, blockAX, m, blockR, m, queue );
for(magma_int_t i=0; i<n; i++)
magma_zaxpy( m, MAGMA_Z_MAKE(-evalues[i], 0), blockX+i*m, 1, blockR+i*m, 1, queue );
// === residualNorms[iterationNumber] = || R ||
magmablas_dznrm2_cols( m, n, blockR, m, residualNorms(0, iterationNumber), queue );
// === restore blockX if needed
if (blockX != origX)
magmablas_zlacpy( MagmaFull, m, n, blockX, m, origX, m, queue );
printf("Eigenvalues:\n");
for(magma_int_t i =0; i<n; i++)
printf("%e ", evalues[i]);
printf("\n\n");
printf("Final residuals:\n");
magma_dprint_gpu(1, n, residualNorms(0, iterationNumber), 1);
printf("\n\n");
//=== Print residual history in a file for plotting ====
CHECK( magma_dmalloc_cpu(&hresidualNorms, (iterationNumber+1) * n));
magma_dgetmatrix( n, iterationNumber,
residualNorms, n,
hresidualNorms, n, queue );
solver_par->iter_res = *hresidualNorms(0, iterationNumber-1);
printf("Residuals are stored in file residualNorms\n");
printf("Plot the residuals using: myplot \n");
FILE *residuals_file;
residuals_file = fopen("residualNorms", "w");
for(magma_int_t i =1; i<iterationNumber; i++) {
for(magma_int_t j = 0; j<n; j++)
fprintf(residuals_file, "%f ", *hresidualNorms(j,i));
fprintf(residuals_file, "\n");
}
fclose(residuals_file);
cleanup:
magma_free_cpu(hresidualNorms);
// === free work space
magma_free( residualNorms );
magma_free_cpu( condestGhistory );
magma_free_cpu( gevalues );
magma_free_cpu( iwork );
magma_free_pinned( hW );
magma_free_pinned( gevectors );
magma_free_pinned( h_gramB );
magma_free( gramM );
magma_free( gramA );
magma_free( gramB );
magma_free( activeMask );
if (blockX != (solver_par->eigenvectors))
magma_free( blockX );
if (blockAX != (solver_par->eigenvectors))
magma_free( blockAX );
if (blockAR != (solver_par->eigenvectors))
magma_free( blockAR );
if (blockAP != (solver_par->eigenvectors))
magma_free( blockAP );
if (blockR != (solver_par->eigenvectors))
magma_free( blockR );
if (blockP != (solver_par->eigenvectors))
magma_free( blockP );
if (blockW != (solver_par->eigenvectors))
magma_free( blockW );
if (dwork != (solver_par->eigenvectors))
magma_free( dwork );
magma_free( eval_gpu );
magma_free_pinned( hwork );
#ifdef COMPLEX
magma_free_cpu( rwork );
rwork = NULL;
#endif
return info;
}
| maxhutch/magma | sparse-iter/src/zlobpcg.cpp | C++ | bsd-3-clause | 27,130 |
import * as fs from "fs"
import * as path from "path"
import * as ts from "typescript"
const coffee = require("coffeescript")
const less = require("less")
import {argv} from "yargs"
import {collect_deps} from "./dependencies"
const mkCoffeescriptError = (error: any, file?: string) => {
const message = error.message
if (error.location == null) {
const text = [file || "<string>", message].join(":")
return {message, text}
} else {
const location = error.location
const line = location.first_line + 1
const column = location.first_column + 1
const text = [file || "<string>", line, column, message].join(":")
let markerLen = 2
if (location.first_line === location.last_line)
markerLen += location.last_column - location.first_column
const extract = error.code.split('\n')[line - 1]
const annotated = [
text,
" " + extract,
" " + Array(column).join(' ') + Array(markerLen).join('^'),
].join('\n')
return {message, line, column, text, extract, annotated}
}
}
const mkLessError = (error: any, file?: string) => {
const message = error.message
const line = error.line
const column = error.column + 1
const text = [file || "<string>", line, column, message].join(":")
const extract = error.extract[line]
const annotated = [text, " " + extract].join("\n")
return {message, line, column, text, extract, annotated}
}
const reply = (data: any) => {
process.stdout.write(JSON.stringify(data))
process.stdout.write("\n")
}
type Files = {[name: string]: string}
function compile_typescript(inputs: Files, bokehjs_dir: string): {outputs: Files, error?: string} {
const options: ts.CompilerOptions = {
noImplicitAny: true,
noImplicitThis: true,
noImplicitReturns: true,
noUnusedLocals: true,
noUnusedParameters: true,
strictNullChecks: true,
strictBindCallApply: false,
strictFunctionTypes: false,
strictPropertyInitialization: false,
alwaysStrict: true,
noErrorTruncation: true,
noEmitOnError: false,
declaration: false,
sourceMap: false,
importHelpers: false,
experimentalDecorators: true,
module: ts.ModuleKind.CommonJS,
moduleResolution: ts.ModuleResolutionKind.NodeJs,
target: ts.ScriptTarget.ES5,
lib: [
"lib.es5.d.ts",
"lib.dom.d.ts",
"lib.es2015.core.d.ts",
"lib.es2015.promise.d.ts",
"lib.es2015.symbol.d.ts",
"lib.es2015.iterable.d.ts",
],
types: [],
baseUrl: ".",
paths: {
"*": [
path.join(bokehjs_dir, "js/lib/*"),
path.join(bokehjs_dir, "js/types/*"),
],
},
}
const host: ts.CompilerHost = {
getDefaultLibFileName: () => "lib.d.ts",
getDefaultLibLocation: () => {
// bokeh/server/static or bokehjs/build
if (path.basename(bokehjs_dir) == "static")
return path.join(bokehjs_dir, "lib")
else
return path.join(path.dirname(bokehjs_dir), "node_modules/typescript/lib")
},
getCurrentDirectory: () => ts.sys.getCurrentDirectory(),
getDirectories: (path) => ts.sys.getDirectories(path),
getCanonicalFileName: (name) => ts.sys.useCaseSensitiveFileNames ? name : name.toLowerCase(),
useCaseSensitiveFileNames: () => ts.sys.useCaseSensitiveFileNames,
getNewLine: () => ts.sys.newLine,
fileExists(name: string): boolean {
return inputs[name] != null || ts.sys.fileExists(name)
},
readFile(name: string): string | undefined {
return inputs[name] != null ? inputs[name] : ts.sys.readFile(name)
},
writeFile(name, content): void {
ts.sys.writeFile(name, content)
},
getSourceFile(name: string, target: ts.ScriptTarget, _onError?: (message: string) => void) {
const source = inputs[name] != null ? inputs[name] : ts.sys.readFile(name)
return source !== undefined ? ts.createSourceFile(name, source, target) : undefined
},
}
const program = ts.createProgram(Object.keys(inputs), options, host)
const outputs: Files = {}
const emitted = program.emit(undefined, (name, output) => outputs[name] = output)
const diagnostics = ts.getPreEmitDiagnostics(program).concat(emitted.diagnostics)
if (diagnostics.length == 0)
return {outputs}
else {
const format_host: ts.FormatDiagnosticsHost = {
getCanonicalFileName: (path) => path,
getCurrentDirectory: ts.sys.getCurrentDirectory,
getNewLine: () => ts.sys.newLine,
}
const error = ts.formatDiagnosticsWithColorAndContext(
ts.sortAndDeduplicateDiagnostics(diagnostics), format_host)
return {outputs, error}
}
}
function compile_javascript(file: string, code: string): {output: string, error?: string} {
const result = ts.transpileModule(code, {
fileName: file,
reportDiagnostics: true,
compilerOptions: {
target: ts.ScriptTarget.ES5,
module: ts.ModuleKind.CommonJS,
},
})
const format_host: ts.FormatDiagnosticsHost = {
getCanonicalFileName: (path) => path,
getCurrentDirectory: ts.sys.getCurrentDirectory,
getNewLine: () => ts.sys.newLine,
}
const {outputText, diagnostics} = result
if (diagnostics == null || diagnostics.length == 0)
return {output: outputText}
else {
const error = ts.formatDiagnosticsWithColorAndContext(
ts.sortAndDeduplicateDiagnostics(diagnostics), format_host)
return {output: outputText, error}
}
}
function rename(p: string, options: {dir?: string, ext?: string}): string {
let {dir, name, ext} = path.parse(p)
if (options.dir != null)
dir = options.dir
if (options.ext != null)
ext = options.ext
return path.format({dir, name, ext})
}
function normalize(path: string): string {
return path.replace(/\\/g, "/")
}
const compile_and_resolve_deps = (input: {code: string, lang: string, file: string, bokehjs_dir: string}) => {
const {file, lang, bokehjs_dir} = input
let {code} = input
let output: string
switch (lang) {
case "typescript":
const inputs = {[normalize(file)]: code}
const result = compile_typescript(inputs, bokehjs_dir)
if (result.error == null)
output = result.outputs[normalize(rename(file, {ext: ".js"}))]
else
return reply({error: result.error})
break
case "coffeescript":
try {
code = coffee.compile(code, {bare: true, shiftLine: true})
} catch (error) {
return reply({error: mkCoffeescriptError(error, file)})
}
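// deliberate fall-through: the compiled CoffeeScript output in `code` is handed to the
// "javascript" case below and transpiled by the same TypeScript transpiler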
case "javascript": {
const result = compile_javascript(file, code)
if (result.error == null)
output = result.output
else
return reply({error: result.error})
break
}
case "less":
const options = {
paths: [path.dirname(file)],
compress: true,
ieCompat: false,
}
less.render(code, options, (error: any, output: any) => {
if (error != null)
reply({error: mkLessError(error, file)})
else
reply({code: output.css})
})
return
default:
throw new Error(`unsupported input type: ${lang}`)
}
const source = ts.createSourceFile(file, output, ts.ScriptTarget.ES5, true, ts.ScriptKind.JS)
const deps = collect_deps(source)
return reply({code: output, deps})
}
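// Entry points (as implemented below): compile a single file passed via argv.file
// (with optional argv.lang and argv.bokehjsDir), or -- when no file argument is given --
// read one JSON message of the shape {code, lang, file, bokehjs_dir} from stdin and
// write the JSON reply to stdout.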
if (argv.file != null) {
const input = {
code: fs.readFileSync(argv.file as string, "utf-8"),
lang: (argv.lang as string | undefined) || "coffeescript",
file: argv.file as string,
bokehjs_dir: (argv.bokehjsDir as string | undefined) || "./build", // this is what bokeh.settings defaults to
}
compile_and_resolve_deps(input)
} else {
const stdin = process.stdin
stdin.resume()
stdin.setEncoding("utf-8")
let data = ""
stdin.on("data", (chunk: string) => data += chunk)
stdin.on("end", () => compile_and_resolve_deps(JSON.parse(data)))
}
| stonebig/bokeh | bokehjs/src/compiler/compile.ts | TypeScript | bsd-3-clause | 7,841 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUVMResync(NURESTObject):
""" Represents a VMResync in the VSD
Notes:
Provide information about the state of a VM resync request.
"""
__rest_name__ = "resync"
__resource_name__ = "resync"
## Constants
CONST_STATUS_IN_PROGRESS = "IN_PROGRESS"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_STATUS_SUCCESS = "SUCCESS"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a VMResync instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> vmresync = NUVMResync(id=u'xxxx-xxx-xxx-xxx', name=u'VMResync')
>>> vmresync = NUVMResync(data=my_dict)
"""
super(NUVMResync, self).__init__()
# Read/Write Attributes
self._last_request_timestamp = None
self._last_time_resync_initiated = None
self._last_updated_by = None
self._last_updated_date = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._status = None
self._owner = None
self._external_id = None
self.expose_attribute(local_name="last_request_timestamp", remote_name="lastRequestTimestamp", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_time_resync_initiated", remote_name="lastTimeResyncInitiated", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="status", remote_name="status", attribute_type=str, is_required=False, is_unique=False, choices=[u'IN_PROGRESS', u'SUCCESS'])
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_request_timestamp(self):
""" Get last_request_timestamp value.
Notes:
Time of the last timestamp received
This attribute is named `lastRequestTimestamp` in VSD API.
"""
return self._last_request_timestamp
@last_request_timestamp.setter
def last_request_timestamp(self, value):
""" Set last_request_timestamp value.
Notes:
Time of the last timestamp received
This attribute is named `lastRequestTimestamp` in VSD API.
"""
self._last_request_timestamp = value
@property
def last_time_resync_initiated(self):
""" Get last_time_resync_initiated value.
Notes:
Time that the resync was initiated
This attribute is named `lastTimeResyncInitiated` in VSD API.
"""
return self._last_time_resync_initiated
@last_time_resync_initiated.setter
def last_time_resync_initiated(self, value):
""" Set last_time_resync_initiated value.
Notes:
Time that the resync was initiated
This attribute is named `lastTimeResyncInitiated` in VSD API.
"""
self._last_time_resync_initiated = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def status(self):
""" Get status value.
Notes:
Status of the resync
"""
return self._status
@status.setter
def status(self, value):
""" Set status value.
Notes:
Status of the resync
"""
self._status = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
| nuagenetworks/vspk-python | vspk/v6/nuvmresync.py | Python | bsd-3-clause | 11,928 |
package glasskey.spray.model
import glasskey.config.OAuthConfig
import spray.http.HttpHeader
trait OAuthAction {
import akka.actor.ActorSystem
import spray.client.pipelining._
import spray.http.HttpHeaders.RawHeader
import spray.http.HttpRequest
import spray.http.parser.HttpParser
import spray.httpx.encoding.{Deflate, Gzip}
import spray.httpx.unmarshalling.FromResponseUnmarshaller
import scala.concurrent.{ExecutionContext, Future}
implicit val system = ActorSystem()
implicit def executor: ExecutionContext = system.dispatcher
def getHeaderedPipeline[T](token: String,
id_token: Option[String] = None, addlHdrs : Option[List[HttpHeader]] = None)
(implicit evidence: FromResponseUnmarshaller[T]): HttpRequest => Future[T] =
(getHeaders(token, id_token, addlHdrs)
~> encode(Gzip)
~> sendReceive
~> decode(Deflate) ~> decode(Gzip)
~> unmarshal[T])
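  // Hypothetical usage sketch (endpoint and result type assumed): build the pipeline once,
  // then apply it to requests, e.g.
  //   val pipeline = getHeaderedPipeline[String](accessToken, Some(idToken))
  //   val response: Future[String] = pipeline(Get("https://api.example.com/resource"))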
def getHeaders(accessToken: String, id_token: Option[String] = None,
addlHdrs : Option[List[HttpHeader]] = None): RequestTransformer =
getHeaders(accessToken, id_token, OAuthConfig.providerConfig.authHeaderName,
OAuthConfig.providerConfig.authHeaderPrefix,
OAuthConfig.providerConfig.idHeaderName,
OAuthConfig.providerConfig.idHeaderPrefix, addlHdrs)
def getHeaders(accessToken: String, id_token: Option[String], authHdrName: String, authHdrPrefix: String,
idHdrName: String, idHdrPrefix: String, addlHdrs : Option[List[HttpHeader]]): RequestTransformer =
addHeaders(getHttpHeaders(accessToken, id_token, authHdrName, authHdrPrefix,
idHdrName, idHdrPrefix, addlHdrs))
def getHttpHeaders(accessToken: String, id_token: Option[String], authHdrName: String, authHdrPrefix: String,
idHdrName: String, idHdrPrefix: String, addlHdrs : Option[List[HttpHeader]]): List[HttpHeader] = {
val hdrs = id_token match {
case Some(idTokenStr) =>
val authHeader = RawHeader(s"${authHdrName}", s"${authHdrPrefix} $accessToken")
val idTokenHeader = RawHeader(s"${idHdrName}", s"${idHdrPrefix} $idTokenStr")
List(
HttpParser.parseHeader(authHeader).left.flatMap(_ ⇒ Right(authHeader)).right.get,
HttpParser.parseHeader(idTokenHeader).left.flatMap(_ ⇒ Right(idTokenHeader)).right.get)
case None => val rawHeader = RawHeader(authHdrName, s"${authHdrPrefix}$accessToken")
List(HttpParser.parseHeader(rawHeader).left.flatMap(_ ⇒ Right(rawHeader)).right.get)
}
hdrs ++ addlHdrs.toList.flatten
}
}
| MonsantoCo/glass-key | glass-key-spray/src/main/scala/glasskey/spray/model/OAuthAction.scala | Scala | bsd-3-clause | 2,713 |
from mock import patch
from nose.tools import eq_
from helper import TestCase
import appvalidator.submain as submain
class TestSubmainPackage(TestCase):
@patch("appvalidator.submain.test_inner_package",
lambda x, z: "success")
def test_package_pass(self):
"Tests the test_package function with simple data"
self.setup_err()
name = "tests/resources/submain/install_rdf.xpi"
with open(name) as pack:
result = submain.test_package(self.err, pack, name)
self.assert_silent()
eq_(result, "success")
@patch("appvalidator.submain.test_inner_package",
lambda x, z: "success")
    def test_package_not_a_zip(self):
"Tests the test_package function fails with a non-zip"
self.setup_err()
name = "tests/resources/junk.xpi"
with open(name) as pack:
result = submain.test_package(self.err, pack, name)
self.assert_failed()
def test_package_corrupt(self):
"Tests the test_package function fails with a corrupt file"
self.setup_err()
name = "tests/resources/corrupt.xpi"
result = submain.test_package(self.err, name, name)
self.assert_failed(with_errors=True, with_warnings=True)
| mattbasta/perfalator | tests/test_submain_package.py | Python | bsd-3-clause | 1,268 |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="scaffolded-by" content="https://github.com/google/stagehand">
<title>futures_api</title>
<link rel="stylesheet" href="styles.css">
<link rel="icon" href="favicon.ico">
<script defer src="main.dart.js"></script>
</head>
<body>
<div id="output"></div>
</body>
</html>
| dart-archive/dart-tutorials-samples | futures/futures-api/web/index.html | HTML | bsd-3-clause | 494 |
Directory for interactive development of c2po plots.
Install deps via `bundle install`, then run `bundle exec guard`,
execute some of the code in `scratch.rb`, and view your plots at `http://localhost:3000/`.
| keminglabs/c2po-ruby | examples/README.markdown | Markdown | bsd-3-clause | 220 |
package com.mistraltech.smogen.codegenerator.javabuilder;
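/**
 * Builds the source text of an interface method declaration: the signature produced by
 * {@link MethodSignatureBuilder}, terminated with a ";" in place of a method body.
 */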
public class InterfaceMethodBuilder extends MethodSignatureBuilder<InterfaceMethodBuilder> {
private InterfaceMethodBuilder() {
}
public static InterfaceMethodBuilder anInterfaceMethod() {
return new InterfaceMethodBuilder();
}
@Override
public String build(JavaBuilderContext context) {
return super.build(context) + ";";
}
}
| mistraltechnologies/smogen | src/main/java/com/mistraltech/smogen/codegenerator/javabuilder/InterfaceMethodBuilder.java | Java | bsd-3-clause | 432 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_CLOUD_DEVICES_COMMON_PRINTER_DESCRIPTION_H_
#define COMPONENTS_CLOUD_DEVICES_COMMON_PRINTER_DESCRIPTION_H_
#include <stdint.h>
#include <string>
#include <vector>
#include "base/logging.h"
#include "build/build_config.h"
#include "components/cloud_devices/common/description_items.h"
// Defines printer options, CDD and CJT items.
// https://developers.google.com/cloud-print/docs/cdd
namespace cloud_devices {
namespace printer {
struct SelectVendorCapabilityOption;
class SelectVendorCapabilityTraits;
typedef SelectionCapability<SelectVendorCapabilityOption,
SelectVendorCapabilityTraits>
SelectVendorCapability;
typedef std::string ContentType;
struct Copies {
// Default requested number of copies.
int32_t default_value = 1;
// Maximum number of copies supported, sourced from
// PrinterSemanticCapsAndDefaults.copies_max.
int32_t max_value = 1;
};
enum class DocumentSheetBack {
NORMAL,
ROTATED,
MANUAL_TUMBLE,
FLIPPED,
};
enum class PwgDocumentTypeSupported {
SGRAY_8 = 22,
SRGB_8 = 23,
};
struct PwgRasterConfig {
PwgRasterConfig();
~PwgRasterConfig();
std::vector<PwgDocumentTypeSupported> document_types_supported;
DocumentSheetBack document_sheet_back;
bool reverse_order_streaming;
bool rotate_all_pages;
};
class RangeVendorCapability {
public:
enum class ValueType {
FLOAT,
INTEGER,
};
RangeVendorCapability();
RangeVendorCapability(ValueType value_type,
const std::string& min_value,
const std::string& max_value);
RangeVendorCapability(ValueType value_type,
const std::string& min_value,
const std::string& max_value,
const std::string& default_value);
RangeVendorCapability(RangeVendorCapability&& other);
~RangeVendorCapability();
RangeVendorCapability& operator=(RangeVendorCapability&& other);
bool operator==(const RangeVendorCapability& other) const;
bool operator!=(const RangeVendorCapability& other) const {
return !(*this == other);
}
bool IsValid() const;
bool LoadFrom(const base::Value& dict);
void SaveTo(base::Value* dict) const;
private:
ValueType value_type_;
std::string min_value_;
std::string max_value_;
std::string default_value_;
DISALLOW_COPY_AND_ASSIGN(RangeVendorCapability);
};
struct SelectVendorCapabilityOption {
SelectVendorCapabilityOption();
SelectVendorCapabilityOption(const std::string& value,
const std::string& display_name);
~SelectVendorCapabilityOption();
bool IsValid() const;
bool operator==(const SelectVendorCapabilityOption& other) const;
bool operator!=(const SelectVendorCapabilityOption& other) const {
return !(*this == other);
}
std::string value;
std::string display_name;
};
class TypedValueVendorCapability {
public:
enum class ValueType {
BOOLEAN,
FLOAT,
INTEGER,
STRING,
};
TypedValueVendorCapability();
explicit TypedValueVendorCapability(ValueType value_type);
TypedValueVendorCapability(ValueType value_type,
const std::string& default_value);
TypedValueVendorCapability(TypedValueVendorCapability&& other);
~TypedValueVendorCapability();
TypedValueVendorCapability& operator=(TypedValueVendorCapability&& other);
bool operator==(const TypedValueVendorCapability& other) const;
bool operator!=(const TypedValueVendorCapability& other) const {
return !(*this == other);
}
bool IsValid() const;
bool LoadFrom(const base::Value& dict);
void SaveTo(base::Value* dict) const;
private:
ValueType value_type_;
std::string default_value_;
DISALLOW_COPY_AND_ASSIGN(TypedValueVendorCapability);
};
class VendorCapability {
public:
enum class Type {
NONE,
RANGE,
SELECT,
TYPED_VALUE,
};
VendorCapability();
VendorCapability(const std::string& id,
const std::string& display_name,
RangeVendorCapability range_capability);
VendorCapability(const std::string& id,
const std::string& display_name,
SelectVendorCapability select_capability);
VendorCapability(const std::string& id,
const std::string& display_name,
TypedValueVendorCapability typed_value_capability);
VendorCapability(VendorCapability&& other);
~VendorCapability();
bool operator==(const VendorCapability& other) const;
bool operator!=(const VendorCapability& other) const {
return !(*this == other);
}
bool IsValid() const;
bool LoadFrom(const base::Value& dict);
void SaveTo(base::Value* dict) const;
private:
void InternalCleanup();
Type type_;
std::string id_;
std::string display_name_;
// If the CDD is valid, exactly one of the capabilities has a value.
union {
RangeVendorCapability range_capability_;
SelectVendorCapability select_capability_;
TypedValueVendorCapability typed_value_capability_;
};
DISALLOW_COPY_AND_ASSIGN(VendorCapability);
};
enum class ColorType {
STANDARD_COLOR,
STANDARD_MONOCHROME,
CUSTOM_COLOR,
CUSTOM_MONOCHROME,
AUTO_COLOR,
};
struct Color {
Color();
explicit Color(ColorType type);
bool IsValid() const;
bool operator==(const Color& other) const;
bool operator!=(const Color& other) const { return !(*this == other); }
ColorType type;
std::string vendor_id;
std::string custom_display_name;
};
enum class DuplexType {
NO_DUPLEX,
LONG_EDGE,
SHORT_EDGE,
};
enum class OrientationType {
PORTRAIT,
LANDSCAPE,
AUTO_ORIENTATION,
};
enum class MarginsType {
NO_MARGINS,
STANDARD_MARGINS,
CUSTOM_MARGINS,
};
struct Margins {
Margins();
Margins(MarginsType type,
int32_t top_um,
int32_t right_um,
int32_t bottom_um,
int32_t left_um);
bool operator==(const Margins& other) const;
bool operator!=(const Margins& other) const { return !(*this == other); }
MarginsType type;
int32_t top_um;
int32_t right_um;
int32_t bottom_um;
int32_t left_um;
};
struct Dpi {
Dpi();
Dpi(int32_t horizontal, int32_t vertical);
bool IsValid() const;
bool operator==(const Dpi& other) const;
bool operator!=(const Dpi& other) const { return !(*this == other); }
int32_t horizontal;
int32_t vertical;
};
enum class FitToPageType {
NO_FITTING,
FIT_TO_PAGE,
GROW_TO_PAGE,
SHRINK_TO_PAGE,
FILL_PAGE,
};
enum class MediaType {
CUSTOM_MEDIA,
// North American standard sheet media names.
NA_INDEX_3X5,
NA_PERSONAL,
NA_MONARCH,
NA_NUMBER_9,
NA_INDEX_4X6,
NA_NUMBER_10,
NA_A2,
NA_NUMBER_11,
NA_NUMBER_12,
NA_5X7,
NA_INDEX_5X8,
NA_NUMBER_14,
NA_INVOICE,
NA_INDEX_4X6_EXT,
NA_6X9,
NA_C5,
NA_7X9,
NA_EXECUTIVE,
NA_GOVT_LETTER,
NA_GOVT_LEGAL,
NA_QUARTO,
NA_LETTER,
NA_FANFOLD_EUR,
NA_LETTER_PLUS,
NA_FOOLSCAP,
NA_LEGAL,
NA_SUPER_A,
NA_9X11,
NA_ARCH_A,
NA_LETTER_EXTRA,
NA_LEGAL_EXTRA,
NA_10X11,
NA_10X13,
NA_10X14,
NA_10X15,
NA_11X12,
NA_EDP,
NA_FANFOLD_US,
NA_11X15,
NA_LEDGER,
NA_EUR_EDP,
NA_ARCH_B,
NA_12X19,
NA_B_PLUS,
NA_SUPER_B,
NA_C,
NA_ARCH_C,
NA_D,
NA_ARCH_D,
NA_ASME_F,
NA_WIDE_FORMAT,
NA_E,
NA_ARCH_E,
NA_F,
// Chinese standard sheet media size names.
ROC_16K,
ROC_8K,
PRC_32K,
PRC_1,
PRC_2,
PRC_4,
PRC_5,
PRC_8,
PRC_6,
PRC_3,
PRC_16K,
PRC_7,
OM_JUURO_KU_KAI,
OM_PA_KAI,
OM_DAI_PA_KAI,
PRC_10,
// ISO standard sheet media size names.
ISO_A10,
ISO_A9,
ISO_A8,
ISO_A7,
ISO_A6,
ISO_A5,
ISO_A5_EXTRA,
ISO_A4,
ISO_A4_TAB,
ISO_A4_EXTRA,
ISO_A3,
ISO_A4X3,
ISO_A4X4,
ISO_A4X5,
ISO_A4X6,
ISO_A4X7,
ISO_A4X8,
ISO_A4X9,
ISO_A3_EXTRA,
ISO_A2,
ISO_A3X3,
ISO_A3X4,
ISO_A3X5,
ISO_A3X6,
ISO_A3X7,
ISO_A1,
ISO_A2X3,
ISO_A2X4,
ISO_A2X5,
ISO_A0,
ISO_A1X3,
ISO_A1X4,
ISO_2A0,
ISO_A0X3,
ISO_B10,
ISO_B9,
ISO_B8,
ISO_B7,
ISO_B6,
ISO_B6C4,
ISO_B5,
ISO_B5_EXTRA,
ISO_B4,
ISO_B3,
ISO_B2,
ISO_B1,
ISO_B0,
ISO_C10,
ISO_C9,
ISO_C8,
ISO_C7,
ISO_C7C6,
ISO_C6,
ISO_C6C5,
ISO_C5,
ISO_C4,
ISO_C3,
ISO_C2,
ISO_C1,
ISO_C0,
ISO_DL,
ISO_RA2,
ISO_SRA2,
ISO_RA1,
ISO_SRA1,
ISO_RA0,
ISO_SRA0,
// Japanese standard sheet media size names.
JIS_B10,
JIS_B9,
JIS_B8,
JIS_B7,
JIS_B6,
JIS_B5,
JIS_B4,
JIS_B3,
JIS_B2,
JIS_B1,
JIS_B0,
JIS_EXEC,
JPN_CHOU4,
JPN_HAGAKI,
JPN_YOU4,
JPN_CHOU2,
JPN_CHOU3,
JPN_OUFUKU,
JPN_KAHU,
JPN_KAKU2,
// Other metric standard sheet media size names.
OM_SMALL_PHOTO,
OM_ITALIAN,
OM_POSTFIX,
OM_LARGE_PHOTO,
OM_FOLIO,
OM_FOLIO_SP,
OM_INVITE,
};
struct Media {
Media();
explicit Media(MediaType type);
Media(MediaType type, int32_t width_um, int32_t height_um);
Media(const std::string& custom_display_name,
const std::string& vendor_id,
int32_t width_um,
int32_t height_um);
Media(const Media& other);
bool MatchBySize();
bool IsValid() const;
bool operator==(const Media& other) const;
bool operator!=(const Media& other) const { return !(*this == other); }
MediaType type;
int32_t width_um;
int32_t height_um;
bool is_continuous_feed;
std::string custom_display_name;
std::string vendor_id;
};
struct Interval {
Interval();
Interval(int32_t start, int32_t end);
explicit Interval(int32_t start);
bool operator==(const Interval& other) const;
bool operator!=(const Interval& other) const { return !(*this == other); }
int32_t start;
int32_t end;
};
typedef std::vector<Interval> PageRange;
class ContentTypeTraits;
class PwgRasterConfigTraits;
class VendorCapabilityTraits;
class ColorTraits;
class DuplexTraits;
class OrientationTraits;
class MarginsTraits;
class DpiTraits;
class FitToPageTraits;
class MediaTraits;
class PageRangeTraits;
class CollateTraits;
class CopiesCapabilityTraits;
class CopiesTicketItemTraits;
typedef ListCapability<ContentType, ContentTypeTraits> ContentTypesCapability;
typedef ValueCapability<PwgRasterConfig, PwgRasterConfigTraits>
PwgRasterConfigCapability;
typedef ListCapability<VendorCapability, VendorCapabilityTraits>
VendorCapabilities;
typedef SelectionCapability<Color, ColorTraits> ColorCapability;
typedef SelectionCapability<DuplexType, DuplexTraits> DuplexCapability;
typedef SelectionCapability<OrientationType, OrientationTraits>
OrientationCapability;
typedef SelectionCapability<Margins, MarginsTraits> MarginsCapability;
typedef SelectionCapability<Dpi, DpiTraits> DpiCapability;
typedef SelectionCapability<FitToPageType, FitToPageTraits> FitToPageCapability;
typedef SelectionCapability<Media, MediaTraits> MediaCapability;
typedef ValueCapability<Copies, class CopiesCapabilityTraits> CopiesCapability;
typedef EmptyCapability<class PageRangeTraits> PageRangeCapability;
typedef BooleanCapability<class CollateTraits> CollateCapability;
typedef BooleanCapability<class ReverseTraits> ReverseCapability;
#if defined(OS_CHROMEOS)
// This capability is not a part of standard CDD description. It's used for
// providing PIN printing opportunity in Chrome OS native printing.
typedef ValueCapability<bool, class PinTraits> PinCapability;
#endif // defined(OS_CHROMEOS)
typedef TicketItem<PwgRasterConfig, PwgRasterConfigTraits>
PwgRasterConfigTicketItem;
typedef TicketItem<Color, ColorTraits> ColorTicketItem;
typedef TicketItem<DuplexType, DuplexTraits> DuplexTicketItem;
typedef TicketItem<OrientationType, OrientationTraits> OrientationTicketItem;
typedef TicketItem<Margins, MarginsTraits> MarginsTicketItem;
typedef TicketItem<Dpi, DpiTraits> DpiTicketItem;
typedef TicketItem<FitToPageType, FitToPageTraits> FitToPageTicketItem;
typedef TicketItem<Media, MediaTraits> MediaTicketItem;
typedef TicketItem<int32_t, CopiesTicketItemTraits> CopiesTicketItem;
typedef TicketItem<PageRange, PageRangeTraits> PageRangeTicketItem;
typedef TicketItem<bool, CollateTraits> CollateTicketItem;
typedef TicketItem<bool, ReverseTraits> ReverseTicketItem;
} // namespace printer
} // namespace cloud_devices
#endif // COMPONENTS_CLOUD_DEVICES_COMMON_PRINTER_DESCRIPTION_H_
| endlessm/chromium-browser | components/cloud_devices/common/printer_description.h | C | bsd-3-clause | 12,413 |
class ProductTag < ActiveRecord::Base
has_attached_file :icon,
:url => "/assets/product_tags/:id/:basename.:extension",
:path => ":rails_root/public/assets/product_tags/:id/:basename.:extension"
validates :name, :presence => true, :uniqueness => true
has_many :products
end
| secoint/spree_products_tags | app/models/product_tag.rb | Ruby | bsd-3-clause | 323 |
"use strict"
function checkEnvironmentForConfig(config:Object) : Object {
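  // Merges the supplied default config with overrides read from the MENTION_BOT_CONFIG
  // environment variable (a JSON object); a default is kept only when no override exists.
  // Illustrative example (values hypothetical): with config = {maxReviewers: 3} and
  // MENTION_BOT_CONFIG='{"maxReviewers": 5}', the returned object is {maxReviewers: 5}.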
let mentionBotEnvConfig;
try {
mentionBotEnvConfig = JSON.parse(process.env.MENTION_BOT_CONFIG);
} catch(e) {
mentionBotEnvConfig = {};
}
return Object.keys(config).reduce((previousValue, key) => {
let defaultConfigValue = config[key];
let environmentVariable = mentionBotEnvConfig[key];
let configElement = {};
configElement[key] = environmentVariable === undefined ? defaultConfigValue
: environmentVariable;
return {...previousValue, ...configElement};
}, {});
}
module.exports = {
checkEnvironmentForConfig
}
| ifuller1/mention-bot | environment.js | JavaScript | bsd-3-clause | 641 |
// Copyright 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "cc/resources/content_layer_updater.h"
#include "base/debug/trace_event.h"
#include "base/time.h"
#include "cc/debug/rendering_stats_instrumentation.h"
#include "cc/resources/layer_painter.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkPaint.h"
#include "third_party/skia/include/core/SkRect.h"
#include "third_party/skia/include/core/SkScalar.h"
#include "ui/gfx/rect_conversions.h"
#include "ui/gfx/rect_f.h"
namespace cc {
ContentLayerUpdater::ContentLayerUpdater(
scoped_ptr<LayerPainter> painter,
RenderingStatsInstrumentation* stats_instrumentation)
: rendering_stats_instrumentation_(stats_instrumentation),
painter_(painter.Pass()) {}
ContentLayerUpdater::~ContentLayerUpdater() {}
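// Paints |painter_|'s content for |content_rect| into |canvas|: the canvas is translated
// so the rect's origin maps to (0, 0), scaled from layer space to content space when the
// scales differ, cleared and clipped to the layer rect, and the painted opaque region is
// scaled back into content space for |resulting_opaque_rect|.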
void ContentLayerUpdater::PaintContents(SkCanvas* canvas,
gfx::Rect content_rect,
float contents_width_scale,
float contents_height_scale,
gfx::Rect* resulting_opaque_rect,
RenderingStats* stats) {
TRACE_EVENT0("cc", "ContentLayerUpdater::PaintContents");
canvas->save();
canvas->translate(SkFloatToScalar(-content_rect.x()),
SkFloatToScalar(-content_rect.y()));
gfx::Rect layer_rect = content_rect;
if (contents_width_scale != 1.f || contents_height_scale != 1.f) {
canvas->scale(SkFloatToScalar(contents_width_scale),
SkFloatToScalar(contents_height_scale));
gfx::RectF rect = gfx::ScaleRect(
content_rect, 1.f / contents_width_scale, 1.f / contents_height_scale);
layer_rect = gfx::ToEnclosingRect(rect);
}
SkPaint paint;
paint.setAntiAlias(false);
paint.setXfermodeMode(SkXfermode::kClear_Mode);
SkRect layer_sk_rect = SkRect::MakeXYWH(
layer_rect.x(), layer_rect.y(), layer_rect.width(), layer_rect.height());
canvas->drawRect(layer_sk_rect, paint);
canvas->clipRect(layer_sk_rect);
gfx::RectF opaque_layer_rect;
base::TimeTicks paint_begin_time;
if (stats)
paint_begin_time = base::TimeTicks::Now();
painter_->Paint(canvas, layer_rect, &opaque_layer_rect);
if (stats) {
stats->total_paint_time += base::TimeTicks::Now() - paint_begin_time;
stats->total_pixels_painted += content_rect.width() * content_rect.height();
}
canvas->restore();
gfx::RectF opaque_content_rect = gfx::ScaleRect(
opaque_layer_rect, contents_width_scale, contents_height_scale);
*resulting_opaque_rect = gfx::ToEnclosedRect(opaque_content_rect);
content_rect_ = content_rect;
}
} // namespace cc
| codenote/chromium-test | cc/resources/content_layer_updater.cc | C++ | bsd-3-clause | 2,848 |
# Copyright (C) 2010 CENATIC: Centro Nacional de Referencia de
# Aplicacion de las TIC basadas en Fuentes Abiertas, Spain.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# Neither the name of the CENATIC nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# You may contact the copyright holder at: Fundacion CENATIC, Edificio
# de Servicios Sociales: C/ Vistahermosa, 1, 3ra planta, 06200
# Almendralejo (Badajoz), Spain
from DBSlayer import Query
def get_type_name (type_id):
l = get_type (type_id)
if not l:
return None
return l['name']
def get_type (type_id):
q = "SELECT id, type "\
"FROM asset_types WHERE id=%(type_id)s;" % locals()
query = Query(q)
if len(query) != 1:
return None
ret = {'id': type_id,
'name': query['type'][0]}
return ret
def get_types ():
q = "SELECT id, type "\
"FROM asset_types;" % locals()
query = Query(q)
if not len(query):
return None
ret = []
for x in query:
d={'id': query[x]['id'],
'name': query[x]['type']}
ret.append(d)
return ret
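# Illustrative return shapes (values hypothetical):
#   get_type(1) -> {'id': 1, 'name': 'video'}
#   get_types() -> [{'id': 1, 'name': 'video'}, {'id': 2, 'name': 'audio'}, ...]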
def test ():
import sys
try:
type_id = sys.argv[1]
except IndexError:
print 'Required test parameters: type_id'
sys.exit(1)
print 'Types:', get_types()
print 'type_id %s, type_name %s' % (type_id, get_type_name(type_id))
print get_type(type_id),
if __name__ == '__main__':
test()
| helix84/activae | src/Type.py | Python | bsd-3-clause | 2,833 |
# -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use mailgun to send emails
- Use redis
'''
from __future__ import absolute_import, unicode_literals
from django.utils import six
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env("DJANGO_SECRET_KEY")
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# django-secure
# ------------------------------------------------------------------------------
INSTALLED_APPS += ("djangosecure", )
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'djangosecure.middleware.SecurityMiddleware',
) + MIDDLEWARE_CLASSES
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool("DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
SECURE_FRAME_DENY = env.bool("DJANGO_SECURE_FRAME_DENY", default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool("DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='{{cookiecutter.project_name}} <noreply@{{cookiecutter.domain_name}}>')
EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
MAILGUN_ACCESS_KEY = env('DJANGO_MAILGUN_API_KEY')
MAILGUN_SERVER_NAME = env('DJANGO_MAILGUN_SERVER_NAME')
EMAIL_SUBJECT_PREFIX = env("DJANGO_EMAIL_SUBJECT_PREFIX", default='[{{cookiecutter.project_name}}] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
]
# CACHE CONFIGURATION
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': [
'redis:6379',
],
'OPTIONS': {
'DB': 1,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 50,
'timeout': 20,
},
'MAX_CONNECTIONS': 1000,
'PICKLE_VERSION': -1,
},
},
}
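# NOTE (assumption): the "redis:6379" location above refers to a linked "redis" service
# hostname, e.g. as provided by this project's docker-compose setup.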
# ASSET CONFIGURATION
# ------------------------------------------------------------------------------
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = '/static'
MEDIA_ROOT = '/media'
STATICFILES_DIRS = (
unicode(APPS_DIR.path("static")),
)
{% if cookiecutter.use_celery %}
# CELERY BROKER CONFIGURATION
# ------------------------------------------------------------------------------
BROKER_URL = "amqp://guest:guest@rabbitmq:5672//"
{% endif %}
{% if cookiecutter.use_sentry %}
# SENTRY CONFIGURATION
# ------------------------------------------------------------------------------
RAVEN_CONFIG = {
'dsn': env("SENTRY_URL"),
}
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
{% endif %}
# Your production stuff: Below this line define 3rd party library settings
| jayfk/cookiecutter-django-docker | {{cookiecutter.repo_name}}/config/settings/production.py | Python | bsd-3-clause | 4,238 |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.sync;
import android.test.FlakyTest;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Pair;
import org.chromium.base.ThreadUtils;
import org.chromium.base.test.util.Feature;
import org.chromium.chrome.browser.ChromeApplication;
import org.chromium.chrome.browser.tabmodel.TabModelSelector;
import org.chromium.chrome.browser.tabmodel.TabModelUtils;
import org.chromium.chrome.browser.util.FeatureUtilities;
import org.chromium.chrome.test.util.browser.sync.SyncTestUtil;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.sync.protocol.EntitySpecifics;
import org.chromium.sync.protocol.SessionHeader;
import org.chromium.sync.protocol.SessionSpecifics;
import org.chromium.sync.protocol.SessionTab;
import org.chromium.sync.protocol.SessionWindow;
import org.chromium.sync.protocol.SyncEnums;
import org.chromium.sync.protocol.TabNavigation;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
/**
* Test suite for the open tabs (sessions) sync data type.
*/
public class OpenTabsTest extends SyncTestBase {
private static final String TAG = "OpenTabsTest";
private static final String OPEN_TABS_TYPE = "Sessions";
// EmbeddedTestServer is preferred here but it can't be used. The test server
// serves pages on localhost and Chrome doesn't sync localhost URLs as typed URLs.
// This type of URL requires no external data connection or resources.
private static final String URL = "data:text,OpenTabsTestURL";
private static final String URL2 = "data:text,OpenTabsTestURL2";
private static final String URL3 = "data:text,OpenTabsTestURL3";
private static final String SESSION_TAG_PREFIX = "FakeSessionTag";
private static final String FAKE_CLIENT = "FakeClient";
// The client name for tabs generated locally will vary based on the device the test is
// running on, so it is determined once in the setUp() method and cached here.
private String mClientName;
// A counter used for generating unique session tags. Resets to 0 in setUp().
private int mSessionTagCounter;
// A container to store OpenTabs information for data verification.
private static class OpenTabs {
public final String headerId;
public final List<String> tabIds;
public final List<String> urls;
private OpenTabs(String headerId, List<String> tabIds, List<String> urls) {
this.headerId = headerId;
this.tabIds = tabIds;
this.urls = urls;
}
}
@Override
protected void setUp() throws Exception {
super.setUp();
setUpTestAccountAndSignInToSync();
mClientName = getClientName();
mSessionTagCounter = 0;
}
// Test syncing an open tab from client to server.
@LargeTest
@Feature({"Sync"})
public void testUploadOpenTab() throws Exception {
loadUrl(URL);
waitForLocalTabsForClient(mClientName, URL);
waitForServerTabs(URL);
}
/*
// Test syncing multiple open tabs from client to server.
@LargeTest
@Feature({"Sync"})
https://crbug.com/592437
*/
@FlakyTest
public void testUploadMultipleOpenTabs() throws Exception {
loadUrl(URL);
loadUrlInNewTab(URL2);
loadUrlInNewTab(URL3);
waitForLocalTabsForClient(mClientName, URL, URL2, URL3);
waitForServerTabs(URL, URL2, URL3);
}
/*
// Test syncing an open tab from client to server.
@LargeTest
@Feature({"Sync"})
https://crbug.com/592437
*/
@FlakyTest
public void testUploadAndCloseOpenTab() throws Exception {
loadUrl(URL);
// Can't have zero tabs, so we have to open two to test closing one.
loadUrlInNewTab(URL2);
waitForLocalTabsForClient(mClientName, URL, URL2);
waitForServerTabs(URL, URL2);
ThreadUtils.runOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
TabModelSelector selector = FeatureUtilities.isDocumentMode(getActivity())
? ChromeApplication.getDocumentTabModelSelector()
: getActivity().getTabModelSelector();
assertTrue(TabModelUtils.closeCurrentTab(selector.getCurrentModel()));
}
});
waitForLocalTabsForClient(mClientName, URL);
waitForServerTabs(URL);
}
// Test syncing an open tab from server to client.
@LargeTest
@Feature({"Sync"})
public void testDownloadOpenTab() throws Exception {
addFakeServerTabs(FAKE_CLIENT, URL);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT, URL);
}
// Test syncing multiple open tabs from server to client.
@LargeTest
@Feature({"Sync"})
public void testDownloadMultipleOpenTabs() throws Exception {
addFakeServerTabs(FAKE_CLIENT, URL, URL2, URL3);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT, URL, URL2, URL3);
}
// Test syncing a tab deletion from server to client.
@LargeTest
@Feature({"Sync"})
public void testDownloadDeletedOpenTab() throws Exception {
// Add the entity to test deleting.
addFakeServerTabs(FAKE_CLIENT, URL);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT, URL);
// Delete on server, sync, and verify deleted locally.
deleteServerTabsForClient(FAKE_CLIENT);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT);
}
// Test syncing multiple tab deletions from server to client.
@LargeTest
@Feature({"Sync"})
public void testDownloadMultipleDeletedOpenTabs() throws Exception {
// Add the entity to test deleting.
addFakeServerTabs(FAKE_CLIENT, URL, URL2, URL3);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT, URL, URL2, URL3);
// Delete on server, sync, and verify deleted locally.
deleteServerTabsForClient(FAKE_CLIENT);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT);
}
private String makeSessionTag() {
return SESSION_TAG_PREFIX + (mSessionTagCounter++);
}
private void addFakeServerTabs(String clientName, String... urls)
throws InterruptedException {
String tag = makeSessionTag();
EntitySpecifics header = makeSessionEntity(tag, clientName, urls.length);
mFakeServerHelper.injectUniqueClientEntity(tag, header);
for (int i = 0; i < urls.length; i++) {
EntitySpecifics tab = makeTabEntity(tag, urls[i], i);
// It is critical that the name here is "<tag> <tabNodeId>", otherwise sync crashes
// when it tries to sync due to the use of TabIdToTag in sessions_sync_manager.cc.
mFakeServerHelper.injectUniqueClientEntity(tag + " " + i, tab);
}
}
private EntitySpecifics makeSessionEntity(String tag, String clientName, int numTabs) {
EntitySpecifics specifics = new EntitySpecifics();
specifics.session = new SessionSpecifics();
specifics.session.sessionTag = tag;
specifics.session.header = new SessionHeader();
specifics.session.header.clientName = clientName;
specifics.session.header.deviceType = SyncEnums.TYPE_PHONE;
SessionWindow window = new SessionWindow();
window.windowId = 0;
window.selectedTabIndex = 0;
window.tab = new int[numTabs];
for (int i = 0; i < numTabs; i++) {
window.tab[i] = i;
}
specifics.session.header.window = new SessionWindow[] { window };
return specifics;
}
private EntitySpecifics makeTabEntity(String tag, String url, int id) {
EntitySpecifics specifics = new EntitySpecifics();
specifics.session = new SessionSpecifics();
specifics.session.sessionTag = tag;
specifics.session.tabNodeId = id;
SessionTab tab = new SessionTab();
tab.tabId = id;
tab.currentNavigationIndex = 0;
TabNavigation nav = new TabNavigation();
nav.virtualUrl = url;
tab.navigation = new TabNavigation[] { nav };
specifics.session.tab = tab;
return specifics;
}
private void deleteServerTabsForClient(String clientName) throws JSONException {
OpenTabs openTabs = getLocalTabsForClient(clientName);
mFakeServerHelper.deleteEntity(openTabs.headerId);
for (String tabId : openTabs.tabIds) {
mFakeServerHelper.deleteEntity(tabId);
}
}
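// Polls until the locally synced open tabs for |clientName| match |urls| exactly, in order.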
private void waitForLocalTabsForClient(final String clientName, String... urls)
throws InterruptedException {
final List<String> urlList = new ArrayList<String>(urls.length);
for (String url : urls) urlList.add(url);
pollInstrumentationThread(Criteria.equals(urlList, new Callable<List<String>>() {
@Override
public List<String> call() throws Exception {
return getLocalTabsForClient(clientName).urls;
}
}));
}
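// Polls until the fake server reports exactly the given open tab URLs.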
private void waitForServerTabs(final String... urls)
throws InterruptedException {
pollInstrumentationThread(
new Criteria("Expected server open tabs: " + Arrays.toString(urls)) {
@Override
public boolean isSatisfied() {
try {
return mFakeServerHelper.verifySessions(urls);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
});
}
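// Waits until both the header and tab entities are present locally, then reads the client
// name from the header entity.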
private String getClientName() throws Exception {
pollInstrumentationThread(Criteria.equals(2, new Callable<Integer>() {
@Override
public Integer call() throws Exception {
return SyncTestUtil.getLocalData(mContext, OPEN_TABS_TYPE).size();
}
}));
List<Pair<String, JSONObject>> tabEntities = SyncTestUtil.getLocalData(
mContext, OPEN_TABS_TYPE);
for (Pair<String, JSONObject> tabEntity : tabEntities) {
if (tabEntity.second.has("header")) {
return tabEntity.second.getJSONObject("header").getString("client_name");
}
}
throw new IllegalStateException("No client name found.");
}
private static class HeaderInfo {
public final String sessionTag;
public final String headerId;
public final List<String> tabIds;
public HeaderInfo(String sessionTag, String headerId, List<String> tabIds) {
this.sessionTag = sessionTag;
this.headerId = headerId;
this.tabIds = tabIds;
}
}
// Distills the local session data into a simple data object for the given client.
private OpenTabs getLocalTabsForClient(String clientName) throws JSONException {
List<Pair<String, JSONObject>> tabEntities = SyncTestUtil.getLocalData(
mContext, OPEN_TABS_TYPE);
// Output lists.
List<String> urls = new ArrayList<String>();
List<String> tabEntityIds = new ArrayList<String>();
HeaderInfo info = findHeaderInfoForClient(clientName, tabEntities);
if (info.sessionTag == null) {
// No client was found. Here we still want to return an empty list of urls.
return new OpenTabs("", tabEntityIds, urls);
}
Map<String, String> tabIdsToUrls = new HashMap<String, String>();
Map<String, String> tabIdsToEntityIds = new HashMap<String, String>();
findTabMappings(info.sessionTag, tabEntities, tabIdsToUrls, tabIdsToEntityIds);
// Convert the tabId list to the url list.
for (String tabId : info.tabIds) {
urls.add(tabIdsToUrls.get(tabId));
tabEntityIds.add(tabIdsToEntityIds.get(tabId));
}
return new OpenTabs(info.headerId, tabEntityIds, urls);
}
// Find the header entity for clientName and extract its sessionTag and tabId list.
private HeaderInfo findHeaderInfoForClient(
String clientName, List<Pair<String, JSONObject>> tabEntities) throws JSONException {
String sessionTag = null;
String headerId = null;
List<String> tabIds = new ArrayList<String>();
for (Pair<String, JSONObject> tabEntity : tabEntities) {
JSONObject header = tabEntity.second.optJSONObject("header");
if (header != null && header.getString("client_name").equals(clientName)) {
sessionTag = tabEntity.second.getString("session_tag");
headerId = tabEntity.first;
JSONArray windows = header.getJSONArray("window");
if (windows.length() == 0) {
// The client was found but there are no tabs.
break;
}
assertEquals("Only single windows are supported.", 1, windows.length());
JSONArray tabs = windows.getJSONObject(0).getJSONArray("tab");
for (int i = 0; i < tabs.length(); i++) {
tabIds.add(tabs.getString(i));
}
break;
}
}
return new HeaderInfo(sessionTag, headerId, tabIds);
}
// Find the associated tabs and record their tabId -> url and entityId mappings.
private void findTabMappings(String sessionTag, List<Pair<String, JSONObject>> tabEntities,
// Populating these maps is the output of this function.
Map<String, String> tabIdsToUrls, Map<String, String> tabIdsToEntityIds)
throws JSONException {
for (Pair<String, JSONObject> tabEntity : tabEntities) {
JSONObject json = tabEntity.second;
if (json.has("tab") && json.getString("session_tag").equals(sessionTag)) {
JSONObject tab = json.getJSONObject("tab");
int i = tab.getInt("current_navigation_index");
String tabId = tab.getString("tab_id");
String url = tab.getJSONArray("navigation")
.getJSONObject(i).getString("virtual_url");
tabIdsToUrls.put(tabId, url);
tabIdsToEntityIds.put(tabId, tabEntity.first);
}
}
}
}
| was4444/chromium.src | chrome/android/sync_shell/javatests/src/org/chromium/chrome/browser/sync/OpenTabsTest.java | Java | bsd-3-clause | 14,827 |
/*
* Copyright 2016 Facebook, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package io.reactivesocket.tck
import java.io.{File, PrintWriter}
class RequesterDSL {
val filename = this.getClass.getSimpleName.reverse.substring(1).reverse + ".txt"
if (!filename.equals("RequesterReflection.txt")) println("writing to " + filename)
var writer: PrintWriter = new PrintWriter(new File(filename))
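// Each request* call appends the corresponding command line to the generated script; the
// returned DSLTestSubscriber writes any follow-up steps for that interaction to the same
// writer.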
def requestResponse(data: String, metadata: String) : DSLTestSubscriber =
new DSLTestSubscriber(writer, data, metadata, "rr")
def requestStream(data: String, metadata: String) : DSLTestSubscriber =
new DSLTestSubscriber(writer, data, metadata, "rs")
def firenForget(data: String, metadata: String) : DSLTestSubscriber =
new DSLTestSubscriber(writer, data, metadata, "fnf")
def requestSubscription(data: String, metadata: String) : DSLTestSubscriber =
new DSLTestSubscriber(writer, data, metadata, "sub")
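// Writes the terminating EOF marker and closes the script file.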
def end() : Unit = {
writer.write("EOF\n")
writer.close()
}
def begintest() : Unit = {
writer.write("!\n")
}
def nametest(name: String) : Unit = writer.write("name%%" + name + "\n")
trait ChannelHandler {
def using(data: String, meta: String) : ChannelHandler
def asFollows(f: () => Unit): Unit
}
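// requestChannel writes the channel command, then asFollows wraps the caller-supplied body
// in braces so the driver can parse the channel interaction as a block.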
object requestChannel extends ChannelHandler {
override def using(data: String, meta: String) : ChannelHandler = {
writer.write("channel%%" + data + "%%" + meta + "%%")
this
}
override def asFollows(f: () => Unit) = {
writer.write("{\n")
f()
writer.write("}\n")
}
}
object createEchoChannel {
def using(data: String, meta: String) : Unit = writer.write("echochannel%%" + data + "%%" + meta + "\n")
}
def channelSubscriber() : DSLTestSubscriber = {
// We create a trivial subscriber here because we don't need a "real" one: the driver already passes in a test
// subscriber, which was created earlier to receive the initial payload from the client.
return new DSLTestSubscriber(writer, "", "", "");
}
def respond(marble : String) : Unit = {
writer.write("respond%%" + marble + "\n")
}
def pass() : Unit = writer.write("pass\n")
def fail() : Unit = writer.write("fail\n")
}
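// Usage sketch (hypothetical, for illustration only; the assertion methods live on
// DSLTestSubscriber and are not shown here): a concrete test would typically call
// begintest(), nametest("some test"), issue interactions such as
// requestResponse("data", "metadata"), and finish with end() so the driver can replay
// the generated script.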
| xytosis/reactivesocket-tck | src/main/scala/io/reactivesocket/tck/RequesterDSL.scala | Scala | bsd-3-clause | 2,782 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/password_manager/password_store_mac.h"
#include "base/basictypes.h"
#include "base/files/scoped_temp_dir.h"
#include "base/scoped_observer.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "base/test/histogram_tester.h"
#include "base/thread_task_runner_handle.h"
#include "chrome/browser/password_manager/password_store_mac_internal.h"
#include "chrome/common/chrome_paths.h"
#include "components/os_crypt/os_crypt.h"
#include "components/password_manager/core/browser/login_database.h"
#include "components/password_manager/core/browser/password_manager_test_utils.h"
#include "components/password_manager/core/browser/password_store_consumer.h"
#include "content/public/test/test_browser_thread.h"
#include "content/public/test/test_utils.h"
#include "crypto/mock_apple_keychain.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using autofill::PasswordForm;
using base::ASCIIToUTF16;
using base::WideToUTF16;
using content::BrowserThread;
using crypto::MockAppleKeychain;
using internal_keychain_helpers::FormsMatchForMerge;
using internal_keychain_helpers::STRICT_FORM_MATCH;
using password_manager::CreatePasswordFormFromDataForTesting;
using password_manager::LoginDatabase;
using password_manager::PasswordFormData;
using password_manager::PasswordStore;
using password_manager::PasswordStoreChange;
using password_manager::PasswordStoreChangeList;
using password_manager::PasswordStoreConsumer;
using testing::_;
using testing::DoAll;
using testing::Invoke;
using testing::IsEmpty;
using testing::SizeIs;
using testing::WithArg;
namespace {
ACTION(QuitUIMessageLoop) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
base::MessageLoop::current()->Quit();
}
// Takes the first form from the mock's argument #0 (of type
// const std::vector<PasswordForm*>&) and copies it to the form pointed to by
// |target_form_ptr|.
ACTION_P(SaveACopyOfFirstForm, target_form_ptr) {
ASSERT_FALSE(arg0.empty());
*target_form_ptr = *arg0[0];
}
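// No-op task used to flush the password store's background thread (see
// PasswordStoreMacTest::FinishAsyncProcessing below).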
void Noop() {
}
class MockPasswordStoreConsumer : public PasswordStoreConsumer {
public:
MOCK_METHOD1(OnGetPasswordStoreResultsConstRef,
void(const std::vector<PasswordForm*>&));
// GMock cannot mock methods with move-only args.
void OnGetPasswordStoreResults(ScopedVector<PasswordForm> results) override {
OnGetPasswordStoreResultsConstRef(results.get());
}
};
class MockPasswordStoreObserver : public PasswordStore::Observer {
public:
MOCK_METHOD1(OnLoginsChanged,
void(const password_manager::PasswordStoreChangeList& changes));
};
// A LoginDatabase that simulates an Init() method that takes a long time.
class SlowToInitLoginDatabase : public password_manager::LoginDatabase {
public:
// Creates an instance whose Init() method will block until |event| is
// signaled. |event| must outlive |this|.
SlowToInitLoginDatabase(const base::FilePath& db_path,
base::WaitableEvent* event)
: password_manager::LoginDatabase(db_path), event_(event) {}
~SlowToInitLoginDatabase() override {}
// LoginDatabase:
bool Init() override {
event_->Wait();
return password_manager::LoginDatabase::Init();
}
private:
base::WaitableEvent* event_;
DISALLOW_COPY_AND_ASSIGN(SlowToInitLoginDatabase);
};
#pragma mark -
// Macro to simplify calling CheckFormsAgainstExpectations with a useful label.
#define CHECK_FORMS(forms, expectations, i) \
CheckFormsAgainstExpectations(forms, expectations, #forms, i)
// Ensures that the data in |forms| match |expectations|, causing test failures
// for any discrepancies.
// TODO(stuartmorgan): This is currently order-dependent; ideally it shouldn't
// matter if |forms| and |expectations| are scrambled.
void CheckFormsAgainstExpectations(
const std::vector<PasswordForm*>& forms,
const std::vector<PasswordFormData*>& expectations,
const char* forms_label, unsigned int test_number) {
EXPECT_EQ(expectations.size(), forms.size()) << forms_label << " in test "
<< test_number;
if (expectations.size() != forms.size())
return;
for (unsigned int i = 0; i < expectations.size(); ++i) {
SCOPED_TRACE(testing::Message() << forms_label << " in test " << test_number
<< ", item " << i);
PasswordForm* form = forms[i];
PasswordFormData* expectation = expectations[i];
EXPECT_EQ(expectation->scheme, form->scheme);
EXPECT_EQ(std::string(expectation->signon_realm), form->signon_realm);
EXPECT_EQ(GURL(expectation->origin), form->origin);
EXPECT_EQ(GURL(expectation->action), form->action);
EXPECT_EQ(WideToUTF16(expectation->submit_element), form->submit_element);
EXPECT_EQ(WideToUTF16(expectation->username_element),
form->username_element);
EXPECT_EQ(WideToUTF16(expectation->password_element),
form->password_element);
if (expectation->username_value) {
EXPECT_EQ(WideToUTF16(expectation->username_value), form->username_value);
EXPECT_EQ(WideToUTF16(expectation->username_value), form->display_name);
EXPECT_TRUE(form->skip_zero_click);
if (expectation->password_value &&
wcscmp(expectation->password_value,
password_manager::kTestingFederatedLoginMarker) == 0) {
EXPECT_TRUE(form->password_value.empty());
EXPECT_EQ(GURL(password_manager::kTestingFederationUrlSpec),
form->federation_url);
} else {
EXPECT_EQ(WideToUTF16(expectation->password_value),
form->password_value);
EXPECT_TRUE(form->federation_url.is_empty());
}
} else {
EXPECT_TRUE(form->blacklisted_by_user);
}
EXPECT_EQ(expectation->preferred, form->preferred);
EXPECT_EQ(expectation->ssl_valid, form->ssl_valid);
EXPECT_DOUBLE_EQ(expectation->creation_time,
form->date_created.ToDoubleT());
base::Time created = base::Time::FromDoubleT(expectation->creation_time);
EXPECT_EQ(
created + base::TimeDelta::FromDays(
password_manager::kTestingDaysAfterPasswordsAreSynced),
form->date_synced);
EXPECT_EQ(GURL(password_manager::kTestingIconUrlSpec), form->icon_url);
}
}
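// Builds the single-element ADD change list that a successful AddLogin() is expected to
// return for |form|.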
PasswordStoreChangeList AddChangeForForm(const PasswordForm& form) {
return PasswordStoreChangeList(
1, PasswordStoreChange(PasswordStoreChange::ADD, form));
}
} // namespace
#pragma mark -
class PasswordStoreMacInternalsTest : public testing::Test {
public:
void SetUp() override {
MockAppleKeychain::KeychainTestData test_data[] = {
// Basic HTML form.
{kSecAuthenticationTypeHTMLForm,
"some.domain.com",
kSecProtocolTypeHTTP,
NULL,
0,
NULL,
"20020601171500Z",
"joe_user",
"sekrit",
false},
// HTML form with path.
{kSecAuthenticationTypeHTMLForm,
"some.domain.com",
kSecProtocolTypeHTTP,
"/insecure.html",
0,
NULL,
"19991231235959Z",
"joe_user",
"sekrit",
false},
// Secure HTML form with path.
{kSecAuthenticationTypeHTMLForm,
"some.domain.com",
kSecProtocolTypeHTTPS,
"/secure.html",
0,
NULL,
"20100908070605Z",
"secure_user",
"password",
false},
// True negative item.
{kSecAuthenticationTypeHTMLForm,
"dont.remember.com",
kSecProtocolTypeHTTP,
NULL,
0,
NULL,
"20000101000000Z",
"",
"",
true},
// De-facto negative item, type one.
{kSecAuthenticationTypeHTMLForm,
"dont.remember.com",
kSecProtocolTypeHTTP,
NULL,
0,
NULL,
"20000101000000Z",
"Password Not Stored",
"",
false},
// De-facto negative item, type two.
{kSecAuthenticationTypeHTMLForm,
"dont.remember.com",
kSecProtocolTypeHTTPS,
NULL,
0,
NULL,
"20000101000000Z",
"Password Not Stored",
" ",
false},
// HTTP auth basic, with port and path.
{kSecAuthenticationTypeHTTPBasic,
"some.domain.com",
kSecProtocolTypeHTTP,
"/insecure.html",
4567,
"low_security",
"19980330100000Z",
"basic_auth_user",
"basic",
false},
// HTTP auth digest, secure.
{kSecAuthenticationTypeHTTPDigest,
"some.domain.com",
kSecProtocolTypeHTTPS,
NULL,
0,
"high_security",
"19980330100000Z",
"digest_auth_user",
"digest",
false},
// An FTP password with an invalid date, for edge-case testing.
{kSecAuthenticationTypeDefault,
"a.server.com",
kSecProtocolTypeFTP,
NULL,
0,
NULL,
"20010203040",
"abc",
"123",
false},
// Password for an Android application.
{kSecAuthenticationTypeHTMLForm,
"android://[email protected]/",
kSecProtocolTypeHTTPS,
"",
0,
NULL,
"20150515141312Z",
"joe_user",
"secret",
false},
};
keychain_ = new MockAppleKeychain();
for (unsigned int i = 0; i < arraysize(test_data); ++i) {
keychain_->AddTestItem(test_data[i]);
}
}
void TearDown() override {
ExpectCreatesAndFreesBalanced();
ExpectCreatorCodesSet();
delete keychain_;
}
protected:
// Causes a test failure unless everything returned from keychain_'s
// ItemCopyAttributesAndData, SearchCreateFromAttributes, and SearchCopyNext
// was correctly freed.
void ExpectCreatesAndFreesBalanced() {
EXPECT_EQ(0, keychain_->UnfreedSearchCount());
EXPECT_EQ(0, keychain_->UnfreedKeychainItemCount());
EXPECT_EQ(0, keychain_->UnfreedAttributeDataCount());
}
// Causes a test failure unless any Keychain items added during the test have
// their creator code set.
void ExpectCreatorCodesSet() {
EXPECT_TRUE(keychain_->CreatorCodesSetForAddedItems());
}
MockAppleKeychain* keychain_;
};
#pragma mark -
TEST_F(PasswordStoreMacInternalsTest, TestKeychainToFormTranslation) {
typedef struct {
const PasswordForm::Scheme scheme;
const char* signon_realm;
const char* origin;
const wchar_t* username; // Set to NULL to check for a blacklist entry.
const wchar_t* password;
const bool ssl_valid;
const int creation_year;
const int creation_month;
const int creation_day;
const int creation_hour;
const int creation_minute;
const int creation_second;
} TestExpectations;
TestExpectations expected[] = {
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", L"joe_user", L"sekrit", false,
2002, 6, 1, 17, 15, 0 },
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/insecure.html", L"joe_user", L"sekrit", false,
1999, 12, 31, 23, 59, 59 },
{ PasswordForm::SCHEME_HTML, "https://some.domain.com/",
"https://some.domain.com/secure.html", L"secure_user", L"password", true,
2010, 9, 8, 7, 6, 5 },
{ PasswordForm::SCHEME_HTML, "http://dont.remember.com/",
"http://dont.remember.com/", NULL, NULL, false,
2000, 1, 1, 0, 0, 0 },
{ PasswordForm::SCHEME_HTML, "http://dont.remember.com/",
"http://dont.remember.com/", NULL, NULL, false,
2000, 1, 1, 0, 0, 0 },
{ PasswordForm::SCHEME_HTML, "https://dont.remember.com/",
"https://dont.remember.com/", NULL, NULL, true,
2000, 1, 1, 0, 0, 0 },
{ PasswordForm::SCHEME_BASIC, "http://some.domain.com:4567/low_security",
"http://some.domain.com:4567/insecure.html", L"basic_auth_user", L"basic",
false, 1998, 03, 30, 10, 00, 00 },
{ PasswordForm::SCHEME_DIGEST, "https://some.domain.com/high_security",
"https://some.domain.com/", L"digest_auth_user", L"digest", true,
1998, 3, 30, 10, 0, 0 },
// This one gives us an invalid date, which we will treat as a "NULL" date,
// i.e. 1601.
{ PasswordForm::SCHEME_OTHER, "http://a.server.com/",
"http://a.server.com/", L"abc", L"123", false,
1601, 1, 1, 0, 0, 0 },
{ PasswordForm::SCHEME_HTML, "android://[email protected]/",
"", L"joe_user", L"secret", true,
2015, 5, 15, 14, 13, 12 },
};
for (unsigned int i = 0; i < arraysize(expected); ++i) {
// Create our fake KeychainItemRef; see MockAppleKeychain docs.
SecKeychainItemRef keychain_item =
reinterpret_cast<SecKeychainItemRef>(i + 1);
PasswordForm form;
bool parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
*keychain_, keychain_item, &form, true);
EXPECT_TRUE(parsed) << "In iteration " << i;
EXPECT_EQ(expected[i].scheme, form.scheme) << "In iteration " << i;
EXPECT_EQ(GURL(expected[i].origin), form.origin) << "In iteration " << i;
EXPECT_EQ(expected[i].ssl_valid, form.ssl_valid) << "In iteration " << i;
EXPECT_EQ(std::string(expected[i].signon_realm), form.signon_realm)
<< "In iteration " << i;
if (expected[i].username) {
EXPECT_EQ(WideToUTF16(expected[i].username), form.username_value)
<< "In iteration " << i;
EXPECT_EQ(WideToUTF16(expected[i].password), form.password_value)
<< "In iteration " << i;
EXPECT_FALSE(form.blacklisted_by_user) << "In iteration " << i;
} else {
EXPECT_TRUE(form.blacklisted_by_user) << "In iteration " << i;
}
base::Time::Exploded exploded_time;
form.date_created.UTCExplode(&exploded_time);
EXPECT_EQ(expected[i].creation_year, exploded_time.year)
<< "In iteration " << i;
EXPECT_EQ(expected[i].creation_month, exploded_time.month)
<< "In iteration " << i;
EXPECT_EQ(expected[i].creation_day, exploded_time.day_of_month)
<< "In iteration " << i;
EXPECT_EQ(expected[i].creation_hour, exploded_time.hour)
<< "In iteration " << i;
EXPECT_EQ(expected[i].creation_minute, exploded_time.minute)
<< "In iteration " << i;
EXPECT_EQ(expected[i].creation_second, exploded_time.second)
<< "In iteration " << i;
}
{
// Use an invalid ref, to make sure errors are reported.
SecKeychainItemRef keychain_item = reinterpret_cast<SecKeychainItemRef>(99);
PasswordForm form;
bool parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
*keychain_, keychain_item, &form, true);
EXPECT_FALSE(parsed);
}
}
TEST_F(PasswordStoreMacInternalsTest, TestKeychainSearch) {
struct TestDataAndExpectation {
const PasswordFormData data;
const size_t expected_fill_matches;
const size_t expected_merge_matches;
};
// Most fields are left blank because we don't care about them for searching.
/* clang-format off */
TestDataAndExpectation test_data[] = {
// An HTML form we've seen.
{ { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
NULL, NULL, NULL, NULL, NULL, L"joe_user", NULL, false, false, 0 },
2, 2 },
{ { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
NULL, NULL, NULL, NULL, NULL, L"wrong_user", NULL, false, false, 0 },
2, 0 },
// An HTML form we haven't seen
{ { PasswordForm::SCHEME_HTML, "http://www.unseendomain.com/",
NULL, NULL, NULL, NULL, NULL, L"joe_user", NULL, false, false, 0 },
0, 0 },
// Basic auth that should match.
{ { PasswordForm::SCHEME_BASIC, "http://some.domain.com:4567/low_security",
NULL, NULL, NULL, NULL, NULL, L"basic_auth_user", NULL, false, false,
0 },
1, 1 },
// Basic auth with the wrong port.
{ { PasswordForm::SCHEME_BASIC, "http://some.domain.com:1111/low_security",
NULL, NULL, NULL, NULL, NULL, L"basic_auth_user", NULL, false, false,
0 },
0, 0 },
// Digest auth we've saved under https, visited with http.
{ { PasswordForm::SCHEME_DIGEST, "http://some.domain.com/high_security",
NULL, NULL, NULL, NULL, NULL, L"digest_auth_user", NULL, false, false,
0 },
0, 0 },
// Digest auth that should match.
{ { PasswordForm::SCHEME_DIGEST, "https://some.domain.com/high_security",
NULL, NULL, NULL, NULL, NULL, L"wrong_user", NULL, false, true, 0 },
1, 0 },
// Digest auth with the wrong domain.
{ { PasswordForm::SCHEME_DIGEST, "https://some.domain.com/other_domain",
NULL, NULL, NULL, NULL, NULL, L"digest_auth_user", NULL, false, true,
0 },
0, 0 },
// Android credentials (both legacy ones with origin, and without).
{ { PasswordForm::SCHEME_HTML, "android://[email protected]/",
"android://[email protected]/", NULL, NULL, NULL, NULL, L"joe_user",
NULL, false, true, 0 },
1, 1 },
{ { PasswordForm::SCHEME_HTML, "android://[email protected]/",
NULL, NULL, NULL, NULL, NULL, L"joe_user", NULL, false, true, 0 },
1, 1 },
// Federated logins do not have a corresponding Keychain entry, and should
// not match the username/password stored for the same application. Note
// that it will match for filling, however, because that part does not know
// that it is a federated login.
{ { PasswordForm::SCHEME_HTML, "android://[email protected]/",
NULL, NULL, NULL, NULL, NULL, L"joe_user",
password_manager::kTestingFederatedLoginMarker, false, true, 0 },
1, 0 },
// Garbage forms should have no matches.
{ { PasswordForm::SCHEME_HTML, "foo/bar/baz",
NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, false, 0 }, 0, 0 },
};
/* clang-format on */
MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain_);
owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
for (unsigned int i = 0; i < arraysize(test_data); ++i) {
scoped_ptr<PasswordForm> query_form =
CreatePasswordFormFromDataForTesting(test_data[i].data);
// Check matches treating the form as a fill target.
ScopedVector<autofill::PasswordForm> matching_items =
keychain_adapter.PasswordsFillingForm(query_form->signon_realm,
query_form->scheme);
EXPECT_EQ(test_data[i].expected_fill_matches, matching_items.size());
// Check matches treating the form as a merging target.
EXPECT_EQ(test_data[i].expected_merge_matches > 0,
keychain_adapter.HasPasswordsMergeableWithForm(*query_form));
std::vector<SecKeychainItemRef> keychain_items;
std::vector<internal_keychain_helpers::ItemFormPair> item_form_pairs =
internal_keychain_helpers::
ExtractAllKeychainItemAttributesIntoPasswordForms(&keychain_items,
*keychain_);
matching_items =
internal_keychain_helpers::ExtractPasswordsMergeableWithForm(
*keychain_, item_form_pairs, *query_form);
EXPECT_EQ(test_data[i].expected_merge_matches, matching_items.size());
STLDeleteContainerPairSecondPointers(item_form_pairs.begin(),
item_form_pairs.end());
for (std::vector<SecKeychainItemRef>::iterator i = keychain_items.begin();
i != keychain_items.end(); ++i) {
keychain_->Free(*i);
}
// None of the pre-seeded items are owned by us, so none should match an
// owned-passwords-only search.
matching_items = owned_keychain_adapter.PasswordsFillingForm(
query_form->signon_realm, query_form->scheme);
EXPECT_EQ(0U, matching_items.size());
}
}
// Changes just the origin path of |form|.
static void SetPasswordFormPath(PasswordForm* form, const char* path) {
GURL::Replacements replacement;
std::string new_value(path);
replacement.SetPathStr(new_value);
form->origin = form->origin.ReplaceComponents(replacement);
}
// Changes just the signon_realm port of |form|.
static void SetPasswordFormPort(PasswordForm* form, const char* port) {
GURL::Replacements replacement;
std::string new_value(port);
replacement.SetPortStr(new_value);
GURL signon_gurl = GURL(form->signon_realm);
form->signon_realm = signon_gurl.ReplaceComponents(replacement).spec();
}
// Changes just the signon_realm auth realm of |form|.
static void SetPasswordFormRealm(PasswordForm* form, const char* realm) {
GURL::Replacements replacement;
std::string new_value(realm);
replacement.SetPathStr(new_value);
GURL signon_gurl = GURL(form->signon_realm);
form->signon_realm = signon_gurl.ReplaceComponents(replacement).spec();
}
TEST_F(PasswordStoreMacInternalsTest, TestKeychainExactSearch) {
MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
PasswordFormData base_form_data[] = {
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/insecure.html",
NULL, NULL, NULL, NULL, L"joe_user", NULL, true, false, 0 },
{ PasswordForm::SCHEME_BASIC, "http://some.domain.com:4567/low_security",
"http://some.domain.com:4567/insecure.html",
NULL, NULL, NULL, NULL, L"basic_auth_user", NULL, true, false, 0 },
{ PasswordForm::SCHEME_DIGEST, "https://some.domain.com/high_security",
"https://some.domain.com",
NULL, NULL, NULL, NULL, L"digest_auth_user", NULL, true, true, 0 },
};
for (unsigned int i = 0; i < arraysize(base_form_data); ++i) {
// Create a base form and make sure we find a match.
scoped_ptr<PasswordForm> base_form =
CreatePasswordFormFromDataForTesting(base_form_data[i]);
EXPECT_TRUE(keychain_adapter.HasPasswordsMergeableWithForm(*base_form));
EXPECT_TRUE(keychain_adapter.HasPasswordExactlyMatchingForm(*base_form));
// Make sure that the matching isn't looser than it should be by checking
// that slightly altered forms don't match.
ScopedVector<autofill::PasswordForm> modified_forms;
modified_forms.push_back(new PasswordForm(*base_form));
modified_forms.back()->username_value = ASCIIToUTF16("wrong_user");
modified_forms.push_back(new PasswordForm(*base_form));
SetPasswordFormPath(modified_forms.back(), "elsewhere.html");
modified_forms.push_back(new PasswordForm(*base_form));
modified_forms.back()->scheme = PasswordForm::SCHEME_OTHER;
modified_forms.push_back(new PasswordForm(*base_form));
SetPasswordFormPort(modified_forms.back(), "1234");
modified_forms.push_back(new PasswordForm(*base_form));
modified_forms.back()->blacklisted_by_user = true;
if (base_form->scheme == PasswordForm::SCHEME_BASIC ||
base_form->scheme == PasswordForm::SCHEME_DIGEST) {
modified_forms.push_back(new PasswordForm(*base_form));
SetPasswordFormRealm(modified_forms.back(), "incorrect");
}
for (unsigned int j = 0; j < modified_forms.size(); ++j) {
bool match = keychain_adapter.HasPasswordExactlyMatchingForm(
*modified_forms[j]);
EXPECT_FALSE(match) << "In modified version " << j
<< " of base form " << i;
}
}
}
TEST_F(PasswordStoreMacInternalsTest, TestKeychainAdd) {
struct TestDataAndExpectation {
PasswordFormData data;
bool should_succeed;
};
/* clang-format off */
TestDataAndExpectation test_data[] = {
// Test a variety of scheme/port/protocol/path variations.
{ { PasswordForm::SCHEME_HTML, "http://web.site.com/",
"http://web.site.com/path/to/page.html", NULL, NULL, NULL, NULL,
L"anonymous", L"knock-knock", false, false, 0 }, true },
{ { PasswordForm::SCHEME_HTML, "https://web.site.com/",
"https://web.site.com/", NULL, NULL, NULL, NULL,
L"admin", L"p4ssw0rd", false, false, 0 }, true },
{ { PasswordForm::SCHEME_BASIC, "http://a.site.com:2222/therealm",
"http://a.site.com:2222/", NULL, NULL, NULL, NULL,
L"username", L"password", false, false, 0 }, true },
{ { PasswordForm::SCHEME_DIGEST, "https://digest.site.com/differentrealm",
"https://digest.site.com/secure.html", NULL, NULL, NULL, NULL,
L"testname", L"testpass", false, false, 0 }, true },
// Test that Android credentials can be stored. Also check the legacy form
// when |origin| was still filled with the Android URI (and not left empty).
{ { PasswordForm::SCHEME_HTML, "android://[email protected]/",
"", NULL, NULL, NULL, NULL,
L"joe_user", L"password", false, true, 0 }, true },
{ { PasswordForm::SCHEME_HTML, "android://[email protected]/",
"android://[email protected]/", NULL, NULL, NULL, NULL,
L"jane_user", L"password2", false, true, 0 }, true },
// Make sure that garbage forms are rejected.
{ { PasswordForm::SCHEME_HTML, "gobbledygook",
"gobbledygook", NULL, NULL, NULL, NULL,
L"anonymous", L"knock-knock", false, false, 0 }, false },
// Test that failing to update a duplicate (forced using the magic failure
// password; see MockAppleKeychain::ItemModifyAttributesAndData) is
// reported.
{ { PasswordForm::SCHEME_HTML, "http://some.domain.com",
"http://some.domain.com/insecure.html", NULL, NULL, NULL, NULL,
L"joe_user", L"fail_me", false, false, 0 }, false },
};
/* clang-format on */
MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain_);
owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
for (unsigned int i = 0; i < arraysize(test_data); ++i) {
scoped_ptr<PasswordForm> in_form =
CreatePasswordFormFromDataForTesting(test_data[i].data);
bool add_succeeded = owned_keychain_adapter.AddPassword(*in_form);
EXPECT_EQ(test_data[i].should_succeed, add_succeeded);
if (add_succeeded) {
EXPECT_TRUE(owned_keychain_adapter.HasPasswordsMergeableWithForm(
*in_form));
EXPECT_TRUE(owned_keychain_adapter.HasPasswordExactlyMatchingForm(
*in_form));
}
}
// Test that adding duplicate item updates the existing item.
// TODO(engedy): Add a test to verify that updating Android credentials work.
// See: https://crbug.com/476851.
{
PasswordFormData data = {
PasswordForm::SCHEME_HTML, "http://some.domain.com",
"http://some.domain.com/insecure.html", NULL,
NULL, NULL, NULL, L"joe_user", L"updated_password", false, false, 0
};
scoped_ptr<PasswordForm> update_form =
CreatePasswordFormFromDataForTesting(data);
MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
EXPECT_TRUE(keychain_adapter.AddPassword(*update_form));
SecKeychainItemRef keychain_item = reinterpret_cast<SecKeychainItemRef>(2);
PasswordForm stored_form;
internal_keychain_helpers::FillPasswordFormFromKeychainItem(*keychain_,
keychain_item,
&stored_form,
true);
EXPECT_EQ(update_form->password_value, stored_form.password_value);
}
}
TEST_F(PasswordStoreMacInternalsTest, TestKeychainRemove) {
struct TestDataAndExpectation {
PasswordFormData data;
bool should_succeed;
};
/* clang-format off */
TestDataAndExpectation test_data[] = {
// Test deletion of an item that we add.
{ { PasswordForm::SCHEME_HTML, "http://web.site.com/",
"http://web.site.com/path/to/page.html", NULL, NULL, NULL, NULL,
L"anonymous", L"knock-knock", false, false, 0 }, true },
// Test that Android credentials can be removed. Also check the legacy case
// when |origin| was still filled with the Android URI (and not left empty).
{ { PasswordForm::SCHEME_HTML, "android://[email protected]/",
"", NULL, NULL, NULL, NULL,
L"joe_user", L"secret", false, true, 0 }, true },
{ { PasswordForm::SCHEME_HTML, "android://[email protected]/",
"android://[email protected]/", NULL, NULL, NULL, NULL,
L"jane_user", L"secret", false, true, 0 }, true },
// Make sure we don't delete items we don't own.
{ { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/insecure.html", NULL, NULL, NULL, NULL,
L"joe_user", NULL, true, false, 0 }, false },
};
/* clang-format on */
MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain_);
owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
// Add our test items (except the last one) so that we can delete them.
for (unsigned int i = 0; i + 1 < arraysize(test_data); ++i) {
scoped_ptr<PasswordForm> add_form =
CreatePasswordFormFromDataForTesting(test_data[i].data);
EXPECT_TRUE(owned_keychain_adapter.AddPassword(*add_form));
}
for (unsigned int i = 0; i < arraysize(test_data); ++i) {
scoped_ptr<PasswordForm> form =
CreatePasswordFormFromDataForTesting(test_data[i].data);
EXPECT_EQ(test_data[i].should_succeed,
owned_keychain_adapter.RemovePassword(*form));
MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
bool match = keychain_adapter.HasPasswordExactlyMatchingForm(*form);
EXPECT_EQ(test_data[i].should_succeed, !match);
}
}
TEST_F(PasswordStoreMacInternalsTest, TestFormMatch) {
PasswordForm base_form;
base_form.signon_realm = std::string("http://some.domain.com/");
base_form.origin = GURL("http://some.domain.com/page.html");
base_form.username_value = ASCIIToUTF16("joe_user");
{
// Check that everything unimportant can be changed.
PasswordForm different_form(base_form);
different_form.username_element = ASCIIToUTF16("username");
different_form.submit_element = ASCIIToUTF16("submit");
different_form.username_element = ASCIIToUTF16("password");
different_form.password_value = ASCIIToUTF16("sekrit");
different_form.action = GURL("http://some.domain.com/action.cgi");
different_form.ssl_valid = true;
different_form.preferred = true;
different_form.date_created = base::Time::Now();
EXPECT_TRUE(
FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
// Check that path differences don't prevent a match.
base_form.origin = GURL("http://some.domain.com/other_page.html");
EXPECT_TRUE(
FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
}
// Check that any one primary key changing is enough to prevent matching.
{
PasswordForm different_form(base_form);
different_form.scheme = PasswordForm::SCHEME_DIGEST;
EXPECT_FALSE(
FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
}
{
PasswordForm different_form(base_form);
different_form.signon_realm = std::string("http://some.domain.com:8080/");
EXPECT_FALSE(
FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
}
{
PasswordForm different_form(base_form);
different_form.username_value = ASCIIToUTF16("john.doe");
EXPECT_FALSE(
FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
}
{
PasswordForm different_form(base_form);
different_form.blacklisted_by_user = true;
EXPECT_FALSE(
FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
}
// Blacklist forms should *never* match for merging, even when identical
// (and certainly not when only one is a blacklist entry).
{
PasswordForm form_a(base_form);
form_a.blacklisted_by_user = true;
PasswordForm form_b(form_a);
EXPECT_FALSE(FormsMatchForMerge(form_a, form_b, STRICT_FORM_MATCH));
}
// Federated login forms should never match for merging either.
{
PasswordForm form_b(base_form);
form_b.federation_url = GURL(password_manager::kTestingFederationUrlSpec);
EXPECT_FALSE(FormsMatchForMerge(base_form, form_b, STRICT_FORM_MATCH));
EXPECT_FALSE(FormsMatchForMerge(form_b, base_form, STRICT_FORM_MATCH));
EXPECT_FALSE(FormsMatchForMerge(form_b, form_b, STRICT_FORM_MATCH));
}
}
TEST_F(PasswordStoreMacInternalsTest, TestFormMerge) {
// Set up a bunch of test data to use in varying combinations.
/* clang-format off */
PasswordFormData keychain_user_1 =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "", L"", L"", L"", L"joe_user", L"sekrit",
false, false, 1010101010 };
PasswordFormData keychain_user_1_with_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"", L"", L"", L"", L"joe_user", L"otherpassword",
false, false, 1010101010 };
PasswordFormData keychain_user_2 =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "", L"", L"", L"", L"john.doe", L"sesame",
false, false, 958739876 };
PasswordFormData keychain_blacklist =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "", L"", L"", L"", NULL, NULL,
false, false, 1010101010 };
PasswordFormData keychain_android =
{ PasswordForm::SCHEME_HTML, "android://[email protected]/",
"", "", L"", L"", L"", L"joe_user", L"secret",
false, true, 1234567890 };
PasswordFormData db_user_1 =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "http://some.domain.com/action.cgi",
L"submit", L"username", L"password", L"joe_user", L"",
true, false, 1212121212 };
PasswordFormData db_user_1_with_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"",
true, false, 1234567890 };
PasswordFormData db_user_3_with_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"second-account", L"",
true, false, 1240000000 };
PasswordFormData database_blacklist_with_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/path.html", "http://some.domain.com/action.cgi",
L"submit", L"username", L"password", NULL, NULL,
true, false, 1212121212 };
PasswordFormData db_android =
{ PasswordForm::SCHEME_HTML, "android://[email protected]/",
"android://[email protected]/", "", L"", L"", L"", L"joe_user", L"",
false, true, 1234567890 };
PasswordFormData db_federated =
{ PasswordForm::SCHEME_HTML, "android://[email protected]/",
"android://[email protected]/", "", L"", L"", L"", L"joe_user",
password_manager::kTestingFederatedLoginMarker,
false, true, 3434343434 };
PasswordFormData merged_user_1 =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "http://some.domain.com/action.cgi",
L"submit", L"username", L"password", L"joe_user", L"sekrit",
true, false, 1212121212 };
PasswordFormData merged_user_1_with_db_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"sekrit",
true, false, 1234567890 };
PasswordFormData merged_user_1_with_both_paths =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"otherpassword",
true, false, 1234567890 };
PasswordFormData merged_android =
{ PasswordForm::SCHEME_HTML, "android://[email protected]/",
"android://[email protected]/", "", L"", L"", L"", L"joe_user",
L"secret", false, true, 1234567890 };
/* clang-format on */
// Build up the big multi-dimensional array of data sets that will actually
// drive the test. Use vectors rather than arrays so that initialization is
// simple.
enum {
KEYCHAIN_INPUT = 0,
DATABASE_INPUT,
MERGE_OUTPUT,
KEYCHAIN_OUTPUT,
DATABASE_OUTPUT,
MERGE_IO_ARRAY_COUNT // termination marker
};
const unsigned int kTestCount = 5;
std::vector< std::vector< std::vector<PasswordFormData*> > > test_data(
MERGE_IO_ARRAY_COUNT, std::vector< std::vector<PasswordFormData*> >(
kTestCount, std::vector<PasswordFormData*>()));
unsigned int current_test = 0;
// Test a merge with a few accounts in both systems, with partial overlap.
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_2);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1_with_path);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_3_with_path);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_user_1);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_user_1_with_db_path);
test_data[KEYCHAIN_OUTPUT][current_test].push_back(&keychain_user_2);
test_data[DATABASE_OUTPUT][current_test].push_back(&db_user_3_with_path);
// Test a merge where Chrome has a blacklist entry, and the keychain has
// a stored account.
++current_test;
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1);
test_data[DATABASE_INPUT][current_test].push_back(
&database_blacklist_with_path);
// We expect both to be present because a blacklist could be specific to a
// subpath, and we want access to the password on other paths.
test_data[MERGE_OUTPUT][current_test].push_back(
&database_blacklist_with_path);
test_data[KEYCHAIN_OUTPUT][current_test].push_back(&keychain_user_1);
// Test a merge where Chrome has an account, and Keychain has a blacklist
// (from another browser) and the Chrome password data.
++current_test;
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_blacklist);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_user_1);
test_data[KEYCHAIN_OUTPUT][current_test].push_back(&keychain_blacklist);
// Test that matches are done using exact path when possible.
++current_test;
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1_with_path);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1_with_path);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_user_1);
test_data[MERGE_OUTPUT][current_test].push_back(
&merged_user_1_with_both_paths);
// Test that Android credentials are matched correctly and that no attempt is
// made to match federated credentials with a Keychain item.
++current_test;
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_android);
test_data[DATABASE_INPUT][current_test].push_back(&db_federated);
test_data[DATABASE_INPUT][current_test].push_back(&db_android);
test_data[MERGE_OUTPUT][current_test].push_back(&db_federated);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_android);
for (unsigned int test_case = 0; test_case <= current_test; ++test_case) {
ScopedVector<autofill::PasswordForm> keychain_forms;
for (std::vector<PasswordFormData*>::iterator i =
test_data[KEYCHAIN_INPUT][test_case].begin();
i != test_data[KEYCHAIN_INPUT][test_case].end(); ++i) {
keychain_forms.push_back(
CreatePasswordFormFromDataForTesting(*(*i)).release());
}
ScopedVector<autofill::PasswordForm> database_forms;
for (std::vector<PasswordFormData*>::iterator i =
test_data[DATABASE_INPUT][test_case].begin();
i != test_data[DATABASE_INPUT][test_case].end(); ++i) {
database_forms.push_back(
CreatePasswordFormFromDataForTesting(*(*i)).release());
}
ScopedVector<autofill::PasswordForm> merged_forms;
internal_keychain_helpers::MergePasswordForms(&keychain_forms,
&database_forms,
&merged_forms);
CHECK_FORMS(keychain_forms.get(), test_data[KEYCHAIN_OUTPUT][test_case],
test_case);
CHECK_FORMS(database_forms.get(), test_data[DATABASE_OUTPUT][test_case],
test_case);
CHECK_FORMS(merged_forms.get(), test_data[MERGE_OUTPUT][test_case],
test_case);
}
}
TEST_F(PasswordStoreMacInternalsTest, TestPasswordBulkLookup) {
PasswordFormData db_data[] = {
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "http://some.domain.com/action.cgi",
L"submit", L"username", L"password", L"joe_user", L"",
true, false, 1212121212 },
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"",
true, false, 1234567890 },
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"second-account", L"",
true, false, 1240000000 },
{ PasswordForm::SCHEME_HTML, "http://dont.remember.com/",
"http://dont.remember.com/",
"http://dont.remember.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"",
true, false, 1240000000 },
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/path.html", "http://some.domain.com/action.cgi",
L"submit", L"username", L"password", NULL, NULL,
true, false, 1212121212 },
};
ScopedVector<autofill::PasswordForm> database_forms;
for (unsigned int i = 0; i < arraysize(db_data); ++i) {
database_forms.push_back(
CreatePasswordFormFromDataForTesting(db_data[i]).release());
}
ScopedVector<autofill::PasswordForm> merged_forms;
internal_keychain_helpers::GetPasswordsForForms(*keychain_, &database_forms,
&merged_forms);
EXPECT_EQ(2U, database_forms.size());
ASSERT_EQ(3U, merged_forms.size());
EXPECT_EQ(ASCIIToUTF16("sekrit"), merged_forms[0]->password_value);
EXPECT_EQ(ASCIIToUTF16("sekrit"), merged_forms[1]->password_value);
EXPECT_TRUE(merged_forms[2]->blacklisted_by_user);
}
TEST_F(PasswordStoreMacInternalsTest, TestBlacklistedFiltering) {
PasswordFormData db_data[] = {
{ PasswordForm::SCHEME_HTML, "http://dont.remember.com/",
"http://dont.remember.com/",
"http://dont.remember.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"non_empty_password",
true, false, 1240000000 },
{ PasswordForm::SCHEME_HTML, "https://dont.remember.com/",
"https://dont.remember.com/",
"https://dont.remember.com/handlepage_secure.cgi",
L"submit", L"username", L"password", L"joe_user", L"non_empty_password",
true, false, 1240000000 },
};
ScopedVector<autofill::PasswordForm> database_forms;
for (unsigned int i = 0; i < arraysize(db_data); ++i) {
database_forms.push_back(
CreatePasswordFormFromDataForTesting(db_data[i]).release());
}
ScopedVector<autofill::PasswordForm> merged_forms;
internal_keychain_helpers::GetPasswordsForForms(*keychain_, &database_forms,
&merged_forms);
EXPECT_EQ(2U, database_forms.size());
ASSERT_EQ(0U, merged_forms.size());
}
TEST_F(PasswordStoreMacInternalsTest, TestFillPasswordFormFromKeychainItem) {
// When |extract_password_data| is false, the password field must be empty,
// and |blacklisted_by_user| must be false.
SecKeychainItemRef keychain_item = reinterpret_cast<SecKeychainItemRef>(1);
PasswordForm form_without_extracted_password;
bool parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
*keychain_,
keychain_item,
&form_without_extracted_password,
false); // Do not extract password.
EXPECT_TRUE(parsed);
ASSERT_TRUE(form_without_extracted_password.password_value.empty());
ASSERT_FALSE(form_without_extracted_password.blacklisted_by_user);
// When |extract_password_data| is true and the keychain entry has a non-empty
// password, the password field must be non-empty, and the value of
// |blacklisted_by_user| must be false.
keychain_item = reinterpret_cast<SecKeychainItemRef>(1);
PasswordForm form_with_extracted_password;
parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
*keychain_,
keychain_item,
&form_with_extracted_password,
true); // Extract password.
EXPECT_TRUE(parsed);
ASSERT_EQ(ASCIIToUTF16("sekrit"),
form_with_extracted_password.password_value);
ASSERT_FALSE(form_with_extracted_password.blacklisted_by_user);
// When |extract_password_data| is true and the keychain entry has an empty
// username and password (""), the password field must be empty, and the value
// of |blacklisted_by_user| must be true.
keychain_item = reinterpret_cast<SecKeychainItemRef>(4);
PasswordForm negative_form;
parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
*keychain_,
keychain_item,
&negative_form,
true); // Extract password.
EXPECT_TRUE(parsed);
ASSERT_TRUE(negative_form.username_value.empty());
ASSERT_TRUE(negative_form.password_value.empty());
ASSERT_TRUE(negative_form.blacklisted_by_user);
// When |extract_password_data| is true and the keychain entry has an empty
// password (""), the password field must be empty (""), and the value of
// |blacklisted_by_user| must be true.
keychain_item = reinterpret_cast<SecKeychainItemRef>(5);
PasswordForm form_with_empty_password_a;
parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
*keychain_,
keychain_item,
&form_with_empty_password_a,
true); // Extract password.
EXPECT_TRUE(parsed);
ASSERT_TRUE(form_with_empty_password_a.password_value.empty());
ASSERT_TRUE(form_with_empty_password_a.blacklisted_by_user);
// When |extract_password_data| is true and the keychain entry has a single
// space password (" "), the password field must be a single space (" "), and
// the value of |blacklisted_by_user| must be true.
keychain_item = reinterpret_cast<SecKeychainItemRef>(6);
PasswordForm form_with_empty_password_b;
parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
*keychain_,
keychain_item,
&form_with_empty_password_b,
true); // Extract password.
EXPECT_TRUE(parsed);
ASSERT_EQ(ASCIIToUTF16(" "),
form_with_empty_password_b.password_value);
ASSERT_TRUE(form_with_empty_password_b.blacklisted_by_user);
}
TEST_F(PasswordStoreMacInternalsTest, TestPasswordGetAll) {
MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain_);
owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
// Add a few passwords of various types so that we own some.
PasswordFormData owned_password_data[] = {
{ PasswordForm::SCHEME_HTML, "http://web.site.com/",
"http://web.site.com/path/to/page.html", NULL, NULL, NULL, NULL,
L"anonymous", L"knock-knock", false, false, 0 },
{ PasswordForm::SCHEME_BASIC, "http://a.site.com:2222/therealm",
"http://a.site.com:2222/", NULL, NULL, NULL, NULL,
L"username", L"password", false, false, 0 },
{ PasswordForm::SCHEME_DIGEST, "https://digest.site.com/differentrealm",
"https://digest.site.com/secure.html", NULL, NULL, NULL, NULL,
L"testname", L"testpass", false, false, 0 },
};
for (unsigned int i = 0; i < arraysize(owned_password_data); ++i) {
scoped_ptr<PasswordForm> form =
CreatePasswordFormFromDataForTesting(owned_password_data[i]);
owned_keychain_adapter.AddPassword(*form);
}
ScopedVector<autofill::PasswordForm> all_passwords =
keychain_adapter.GetAllPasswordFormPasswords();
EXPECT_EQ(9 + arraysize(owned_password_data), all_passwords.size());
ScopedVector<autofill::PasswordForm> owned_passwords =
owned_keychain_adapter.GetAllPasswordFormPasswords();
EXPECT_EQ(arraysize(owned_password_data), owned_passwords.size());
}
#pragma mark -
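// Test fixture that wires a PasswordStoreMac to a MockAppleKeychain and an on-disk
// LoginDatabase, with the store's work running on a dedicated background thread.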
class PasswordStoreMacTest : public testing::Test {
public:
PasswordStoreMacTest() : ui_thread_(BrowserThread::UI, &message_loop_) {}
void SetUp() override {
ASSERT_TRUE(db_dir_.CreateUniqueTempDir());
histogram_tester_.reset(new base::HistogramTester);
// Ensure that LoginDatabase will use the mock keychain if it needs to
// encrypt/decrypt a password.
OSCrypt::UseMockKeychain(true);
login_db_.reset(
new password_manager::LoginDatabase(test_login_db_file_path()));
thread_.reset(new base::Thread("Chrome_PasswordStore_Thread"));
ASSERT_TRUE(thread_->Start());
ASSERT_TRUE(thread_->task_runner()->PostTask(
FROM_HERE, base::Bind(&PasswordStoreMacTest::InitLoginDatabase,
base::Unretained(login_db_.get()))));
CreateAndInitPasswordStore(login_db_.get());
// Make sure deferred initialization is performed before some tests start
// accessing the |login_db| directly.
FinishAsyncProcessing();
}
void TearDown() override {
ClosePasswordStore();
thread_.reset();
login_db_.reset();
// Whatever a test did, PasswordStoreMac stores only empty password values
// in LoginDatabase. The empty values do not require encryption and therefore
// OSCrypt shouldn't call the Keychain. The histogram doesn't cover the
// internet passwords.
if (histogram_tester_) {
scoped_ptr<base::HistogramSamples> samples =
histogram_tester_->GetHistogramSamplesSinceCreation(
"OSX.Keychain.Access");
EXPECT_TRUE(!samples || samples->TotalCount() == 0);
}
}
static void InitLoginDatabase(password_manager::LoginDatabase* login_db) {
ASSERT_TRUE(login_db->Init());
}
void CreateAndInitPasswordStore(password_manager::LoginDatabase* login_db) {
store_ = new PasswordStoreMac(
base::ThreadTaskRunnerHandle::Get(), nullptr,
make_scoped_ptr<AppleKeychain>(new MockAppleKeychain));
ASSERT_TRUE(thread_->task_runner()->PostTask(
FROM_HERE, base::Bind(&PasswordStoreMac::InitWithTaskRunner, store_,
thread_->task_runner())));
ASSERT_TRUE(thread_->task_runner()->PostTask(
FROM_HERE, base::Bind(&PasswordStoreMac::set_login_metadata_db, store_,
base::Unretained(login_db))));
}
void ClosePasswordStore() {
if (!store_)
return;
store_->Shutdown();
store_ = nullptr;
}
// Verifies that the given |form| can be properly stored so that it can be
// retrieved by FillMatchingLogins() and GetAutofillableLogins(), and then it
// can be properly removed.
void VerifyCredentialLifecycle(const PasswordForm& form) {
// Run everything twice to make sure no garbage is left behind that would
// prevent storing the form a second time.
for (size_t iteration = 0; iteration < 2; ++iteration) {
SCOPED_TRACE(testing::Message("Iteration: ") << iteration);
MockPasswordStoreConsumer mock_consumer;
EXPECT_CALL(mock_consumer, OnGetPasswordStoreResultsConstRef(IsEmpty()))
.WillOnce(QuitUIMessageLoop());
store()->GetAutofillableLogins(&mock_consumer);
base::MessageLoop::current()->Run();
::testing::Mock::VerifyAndClearExpectations(&mock_consumer);
store()->AddLogin(form);
FinishAsyncProcessing();
PasswordForm returned_form;
EXPECT_CALL(mock_consumer, OnGetPasswordStoreResultsConstRef(SizeIs(1u)))
.WillOnce(
DoAll(SaveACopyOfFirstForm(&returned_form), QuitUIMessageLoop()));
// The query operations will also do some housekeeping: they will remove
// dangling credentials in the LoginDatabase without a matching Keychain
// item when one is expected. If the logic that stores the Keychain item
// is incorrect, this will wipe the newly added form before the second
// query.
store()->GetAutofillableLogins(&mock_consumer);
base::MessageLoop::current()->Run();
::testing::Mock::VerifyAndClearExpectations(&mock_consumer);
EXPECT_EQ(form, returned_form);
PasswordForm query_form = form;
query_form.password_value.clear();
query_form.username_value.clear();
EXPECT_CALL(mock_consumer, OnGetPasswordStoreResultsConstRef(SizeIs(1u)))
.WillOnce(
DoAll(SaveACopyOfFirstForm(&returned_form), QuitUIMessageLoop()));
store()->GetLogins(query_form, PasswordStore::ALLOW_PROMPT,
&mock_consumer);
base::MessageLoop::current()->Run();
::testing::Mock::VerifyAndClearExpectations(&mock_consumer);
EXPECT_EQ(form, returned_form);
store()->RemoveLogin(form);
}
}
base::FilePath test_login_db_file_path() const {
return db_dir_.path().Append(FILE_PATH_LITERAL("login.db"));
}
password_manager::LoginDatabase* login_db() const {
return store_->login_metadata_db();
}
MockAppleKeychain* keychain() {
return static_cast<MockAppleKeychain*>(store_->keychain());
}
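// Blocks until every task previously posted to the background thread has run.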
void FinishAsyncProcessing() {
scoped_refptr<content::MessageLoopRunner> runner =
new content::MessageLoopRunner;
ASSERT_TRUE(thread_->task_runner()->PostTaskAndReply(
FROM_HERE, base::Bind(&Noop), runner->QuitClosure()));
runner->Run();
}
PasswordStoreMac* store() { return store_.get(); }
protected:
base::MessageLoopForUI message_loop_;
content::TestBrowserThread ui_thread_;
// Thread that the synchronous methods are run on.
scoped_ptr<base::Thread> thread_;
base::ScopedTempDir db_dir_;
scoped_ptr<password_manager::LoginDatabase> login_db_;
scoped_refptr<PasswordStoreMac> store_;
scoped_ptr<base::HistogramTester> histogram_tester_;
};
TEST_F(PasswordStoreMacTest, TestStoreUpdate) {
// Insert a password into both the database and the keychain.
// This is done manually, rather than through store_->AddLogin, because the
// Mock Keychain isn't smart enough to support updates generically; instead,
// some.domain.com triggers special handling to exercise updates, which makes
// inserting fail.
PasswordFormData joint_data = {
PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/insecure.html", "login.cgi",
L"username", L"password", L"submit", L"joe_user", L"sekrit", true, false, 1
};
scoped_ptr<PasswordForm> joint_form =
CreatePasswordFormFromDataForTesting(joint_data);
EXPECT_EQ(AddChangeForForm(*joint_form), login_db()->AddLogin(*joint_form));
MockAppleKeychain::KeychainTestData joint_keychain_data = {
kSecAuthenticationTypeHTMLForm, "some.domain.com",
kSecProtocolTypeHTTP, "/insecure.html", 0, NULL, "20020601171500Z",
"joe_user", "sekrit", false };
keychain()->AddTestItem(joint_keychain_data);
// Insert a password into the keychain only.
MockAppleKeychain::KeychainTestData keychain_only_data = {
kSecAuthenticationTypeHTMLForm, "keychain.only.com",
kSecProtocolTypeHTTP, NULL, 0, NULL, "20020601171500Z",
"keychain", "only", false
};
keychain()->AddTestItem(keychain_only_data);
struct UpdateData {
PasswordFormData form_data;
const char* password; // NULL indicates no entry should be present.
};
// Make a series of update calls.
UpdateData updates[] = {
// Update the keychain+db passwords (the normal password update case).
{ { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/insecure.html", "login.cgi",
L"username", L"password", L"submit", L"joe_user", L"53krit",
true, false, 2 },
"53krit",
},
// Update the keychain-only password; this simulates the initial use of a
      // password stored by another browser.
{ { PasswordForm::SCHEME_HTML, "http://keychain.only.com/",
"http://keychain.only.com/login.html", "login.cgi",
L"username", L"password", L"submit", L"keychain", L"only",
true, false, 2 },
"only",
},
// Update a password that doesn't exist in either location. This tests the
// case where a form is filled, then the stored login is removed, then the
// form is submitted.
{ { PasswordForm::SCHEME_HTML, "http://different.com/",
"http://different.com/index.html", "login.cgi",
L"username", L"password", L"submit", L"abc", L"123",
true, false, 2 },
NULL,
},
};
for (unsigned int i = 0; i < arraysize(updates); ++i) {
scoped_ptr<PasswordForm> form =
CreatePasswordFormFromDataForTesting(updates[i].form_data);
store_->UpdateLogin(*form);
}
FinishAsyncProcessing();
MacKeychainPasswordFormAdapter keychain_adapter(keychain());
for (unsigned int i = 0; i < arraysize(updates); ++i) {
scoped_ptr<PasswordForm> query_form =
CreatePasswordFormFromDataForTesting(updates[i].form_data);
ScopedVector<autofill::PasswordForm> matching_items =
keychain_adapter.PasswordsFillingForm(query_form->signon_realm,
query_form->scheme);
if (updates[i].password) {
EXPECT_GT(matching_items.size(), 0U) << "iteration " << i;
if (matching_items.size() >= 1)
EXPECT_EQ(ASCIIToUTF16(updates[i].password),
matching_items[0]->password_value) << "iteration " << i;
} else {
EXPECT_EQ(0U, matching_items.size()) << "iteration " << i;
}
EXPECT_TRUE(login_db()->GetLogins(*query_form, &matching_items));
EXPECT_EQ(updates[i].password ? 1U : 0U, matching_items.size())
<< "iteration " << i;
}
}
TEST_F(PasswordStoreMacTest, TestDBKeychainAssociation) {
// Tests that association between the keychain and login database parts of a
// password added by fuzzy (PSL) matching works.
// 1. Add a password for www.facebook.com
// 2. Get a password for m.facebook.com. This fuzzy matches and returns the
// www.facebook.com password.
// 3. Add the returned password for m.facebook.com.
// 4. Remove both passwords.
// -> check: that both are gone from the login DB and the keychain
// This test should in particular ensure that we don't keep passwords in the
  // keychain just because we think we still have other (fuzzy-)matching entries
  // for them in the login database. (For example, here if we deleted the
  // www.facebook.com password from the login database, we should not be blocked
  // from deleting it from the keystore just because the m.facebook.com password
// fuzzy-matches the www.facebook.com one.)
// 1. Add a password for www.facebook.com
PasswordFormData www_form_data = {
PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
"http://www.facebook.com/index.html", "login",
L"username", L"password", L"submit", L"joe_user", L"sekrit", true, false, 1
};
scoped_ptr<PasswordForm> www_form =
CreatePasswordFormFromDataForTesting(www_form_data);
EXPECT_EQ(AddChangeForForm(*www_form), login_db()->AddLogin(*www_form));
MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain());
owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
owned_keychain_adapter.AddPassword(*www_form);
// 2. Get a password for m.facebook.com.
PasswordForm m_form(*www_form);
m_form.signon_realm = "http://m.facebook.com";
m_form.origin = GURL("http://m.facebook.com/index.html");
MockPasswordStoreConsumer consumer;
store_->GetLogins(m_form, PasswordStore::ALLOW_PROMPT, &consumer);
PasswordForm returned_form;
EXPECT_CALL(consumer, OnGetPasswordStoreResultsConstRef(SizeIs(1u)))
.WillOnce(
DoAll(SaveACopyOfFirstForm(&returned_form), QuitUIMessageLoop()));
base::MessageLoop::current()->Run();
// 3. Add the returned password for m.facebook.com.
EXPECT_EQ(AddChangeForForm(returned_form),
login_db()->AddLogin(returned_form));
owned_keychain_adapter.AddPassword(m_form);
// 4. Remove both passwords.
store_->RemoveLogin(*www_form);
store_->RemoveLogin(m_form);
FinishAsyncProcessing();
// No trace of www.facebook.com.
ScopedVector<autofill::PasswordForm> matching_items =
owned_keychain_adapter.PasswordsFillingForm(www_form->signon_realm,
www_form->scheme);
EXPECT_EQ(0u, matching_items.size());
EXPECT_TRUE(login_db()->GetLogins(*www_form, &matching_items));
EXPECT_EQ(0u, matching_items.size());
// No trace of m.facebook.com.
matching_items = owned_keychain_adapter.PasswordsFillingForm(
m_form.signon_realm, m_form.scheme);
EXPECT_EQ(0u, matching_items.size());
EXPECT_TRUE(login_db()->GetLogins(m_form, &matching_items));
EXPECT_EQ(0u, matching_items.size());
}
namespace {
class PasswordsChangeObserver :
public password_manager::PasswordStore::Observer {
public:
PasswordsChangeObserver(PasswordStoreMac* store) : observer_(this) {
observer_.Add(store);
}
void WaitAndVerify(PasswordStoreMacTest* test) {
test->FinishAsyncProcessing();
::testing::Mock::VerifyAndClearExpectations(this);
}
// password_manager::PasswordStore::Observer:
MOCK_METHOD1(OnLoginsChanged,
void(const password_manager::PasswordStoreChangeList& changes));
private:
ScopedObserver<password_manager::PasswordStore,
PasswordsChangeObserver> observer_;
};
password_manager::PasswordStoreChangeList GetAddChangeList(
const PasswordForm& form) {
password_manager::PasswordStoreChange change(
password_manager::PasswordStoreChange::ADD, form);
return password_manager::PasswordStoreChangeList(1, change);
}
// Tests RemoveLoginsCreatedBetween or RemoveLoginsSyncedBetween depending on
// |check_created|.
void CheckRemoveLoginsBetween(PasswordStoreMacTest* test, bool check_created) {
PasswordFormData www_form_data_facebook = {
PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
"http://www.facebook.com/index.html", "login", L"submit", L"username",
L"password", L"joe_user", L"sekrit", true, false, 0 };
  // The old form doesn't have element names.
PasswordFormData www_form_data_facebook_old = {
PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
"http://www.facebook.com/index.html", "login", L"", L"",
L"", L"joe_user", L"oldsekrit", true, false, 0 };
PasswordFormData www_form_data_other = {
PasswordForm::SCHEME_HTML, "http://different.com/",
"http://different.com/index.html", "login", L"submit", L"username",
L"password", L"different_joe_user", L"sekrit", true, false, 0 };
scoped_ptr<PasswordForm> form_facebook =
CreatePasswordFormFromDataForTesting(www_form_data_facebook);
scoped_ptr<PasswordForm> form_facebook_old =
CreatePasswordFormFromDataForTesting(www_form_data_facebook_old);
scoped_ptr<PasswordForm> form_other =
CreatePasswordFormFromDataForTesting(www_form_data_other);
base::Time now = base::Time::Now();
// TODO(vasilii): remove the next line once crbug/374132 is fixed.
now = base::Time::FromTimeT(now.ToTimeT());
base::Time next_day = now + base::TimeDelta::FromDays(1);
if (check_created) {
form_facebook_old->date_created = now;
form_facebook->date_created = next_day;
form_other->date_created = next_day;
} else {
form_facebook_old->date_synced = now;
form_facebook->date_synced = next_day;
form_other->date_synced = next_day;
}
PasswordsChangeObserver observer(test->store());
test->store()->AddLogin(*form_facebook_old);
test->store()->AddLogin(*form_facebook);
test->store()->AddLogin(*form_other);
EXPECT_CALL(observer, OnLoginsChanged(GetAddChangeList(*form_facebook_old)));
EXPECT_CALL(observer, OnLoginsChanged(GetAddChangeList(*form_facebook)));
EXPECT_CALL(observer, OnLoginsChanged(GetAddChangeList(*form_other)));
observer.WaitAndVerify(test);
// Check the keychain content.
MacKeychainPasswordFormAdapter owned_keychain_adapter(test->keychain());
owned_keychain_adapter.SetFindsOnlyOwnedItems(false);
ScopedVector<PasswordForm> matching_items(
owned_keychain_adapter.PasswordsFillingForm(form_facebook->signon_realm,
form_facebook->scheme));
EXPECT_EQ(1u, matching_items.size());
matching_items = owned_keychain_adapter.PasswordsFillingForm(
form_other->signon_realm, form_other->scheme);
EXPECT_EQ(1u, matching_items.size());
// Remove facebook.
if (check_created) {
test->store()->RemoveLoginsCreatedBetween(base::Time(), next_day,
base::Closure());
} else {
test->store()->RemoveLoginsSyncedBetween(base::Time(), next_day);
}
password_manager::PasswordStoreChangeList list;
form_facebook_old->password_value.clear();
form_facebook->password_value.clear();
list.push_back(password_manager::PasswordStoreChange(
password_manager::PasswordStoreChange::REMOVE, *form_facebook_old));
list.push_back(password_manager::PasswordStoreChange(
password_manager::PasswordStoreChange::REMOVE, *form_facebook));
EXPECT_CALL(observer, OnLoginsChanged(list));
list.clear();
observer.WaitAndVerify(test);
matching_items = owned_keychain_adapter.PasswordsFillingForm(
form_facebook->signon_realm, form_facebook->scheme);
EXPECT_EQ(0u, matching_items.size());
matching_items = owned_keychain_adapter.PasswordsFillingForm(
form_other->signon_realm, form_other->scheme);
EXPECT_EQ(1u, matching_items.size());
// Remove form_other.
if (check_created) {
test->store()->RemoveLoginsCreatedBetween(next_day, base::Time(),
base::Closure());
} else {
test->store()->RemoveLoginsSyncedBetween(next_day, base::Time());
}
form_other->password_value.clear();
list.push_back(password_manager::PasswordStoreChange(
password_manager::PasswordStoreChange::REMOVE, *form_other));
EXPECT_CALL(observer, OnLoginsChanged(list));
observer.WaitAndVerify(test);
matching_items = owned_keychain_adapter.PasswordsFillingForm(
form_other->signon_realm, form_other->scheme);
EXPECT_EQ(0u, matching_items.size());
}
} // namespace
TEST_F(PasswordStoreMacTest, TestRemoveLoginsCreatedBetween) {
CheckRemoveLoginsBetween(this, true);
}
TEST_F(PasswordStoreMacTest, TestRemoveLoginsSyncedBetween) {
CheckRemoveLoginsBetween(this, false);
}
TEST_F(PasswordStoreMacTest, TestRemoveLoginsMultiProfile) {
// Make sure that RemoveLoginsCreatedBetween does affect only the correct
// profile.
// Add a third-party password.
MockAppleKeychain::KeychainTestData keychain_data = {
kSecAuthenticationTypeHTMLForm, "some.domain.com",
kSecProtocolTypeHTTP, "/insecure.html", 0, NULL, "20020601171500Z",
"joe_user", "sekrit", false };
keychain()->AddTestItem(keychain_data);
// Add a password through the adapter. It has the "Chrome" creator tag.
// However, it's not referenced by the password database.
MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain());
owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
PasswordFormData www_form_data1 = {
PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
"http://www.facebook.com/index.html", "login", L"username", L"password",
L"submit", L"joe_user", L"sekrit", true, false, 1 };
scoped_ptr<PasswordForm> www_form =
CreatePasswordFormFromDataForTesting(www_form_data1);
EXPECT_TRUE(owned_keychain_adapter.AddPassword(*www_form));
// Add a password from the current profile.
PasswordFormData www_form_data2 = {
PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
"http://www.facebook.com/index.html", "login", L"username", L"password",
L"submit", L"not_joe_user", L"12345", true, false, 1 };
www_form = CreatePasswordFormFromDataForTesting(www_form_data2);
store_->AddLogin(*www_form);
FinishAsyncProcessing();
ScopedVector<PasswordForm> matching_items;
EXPECT_TRUE(login_db()->GetLogins(*www_form, &matching_items));
EXPECT_EQ(1u, matching_items.size());
store_->RemoveLoginsCreatedBetween(base::Time(), base::Time(),
base::Closure());
FinishAsyncProcessing();
// Check the second facebook form is gone.
EXPECT_TRUE(login_db()->GetLogins(*www_form, &matching_items));
EXPECT_EQ(0u, matching_items.size());
// Check the first facebook form is still there.
matching_items = owned_keychain_adapter.PasswordsFillingForm(
www_form->signon_realm, www_form->scheme);
ASSERT_EQ(1u, matching_items.size());
EXPECT_EQ(ASCIIToUTF16("joe_user"), matching_items[0]->username_value);
// Check the third-party password is still there.
owned_keychain_adapter.SetFindsOnlyOwnedItems(false);
matching_items = owned_keychain_adapter.PasswordsFillingForm(
"http://some.domain.com/insecure.html", PasswordForm::SCHEME_HTML);
ASSERT_EQ(1u, matching_items.size());
}
// Add a facebook form to the store but not to the keychain. The form is to be
// implicitly deleted. However, the observers shouldn't get notified about
// deletion of non-existent forms like m.facebook.com.
TEST_F(PasswordStoreMacTest, SilentlyRemoveOrphanedForm) {
testing::StrictMock<MockPasswordStoreObserver> mock_observer;
store()->AddObserver(&mock_observer);
// 1. Add a password for www.facebook.com to the LoginDatabase.
PasswordFormData www_form_data = {
PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
"http://www.facebook.com/index.html", "login",
L"username", L"password", L"submit", L"joe_user", L"", true, false, 1
};
scoped_ptr<PasswordForm> www_form(
CreatePasswordFormFromDataForTesting(www_form_data));
EXPECT_EQ(AddChangeForForm(*www_form), login_db()->AddLogin(*www_form));
// 2. Get a PSL-matched password for m.facebook.com. The observer isn't
// notified because the form isn't in the database.
PasswordForm m_form(*www_form);
m_form.signon_realm = "http://m.facebook.com";
m_form.origin = GURL("http://m.facebook.com/index.html");
MockPasswordStoreConsumer consumer;
ON_CALL(consumer, OnGetPasswordStoreResultsConstRef(_))
.WillByDefault(QuitUIMessageLoop());
EXPECT_CALL(mock_observer, OnLoginsChanged(_)).Times(0);
// The PSL-matched form isn't returned because there is no actual password in
// the keychain.
EXPECT_CALL(consumer, OnGetPasswordStoreResultsConstRef(IsEmpty()));
store_->GetLogins(m_form, PasswordStore::ALLOW_PROMPT, &consumer);
base::MessageLoop::current()->Run();
ScopedVector<autofill::PasswordForm> all_forms;
EXPECT_TRUE(login_db()->GetAutofillableLogins(&all_forms));
EXPECT_EQ(1u, all_forms.size());
::testing::Mock::VerifyAndClearExpectations(&mock_observer);
// 3. Get a password for www.facebook.com. The form is implicitly removed and
// the observer is notified.
password_manager::PasswordStoreChangeList list;
list.push_back(password_manager::PasswordStoreChange(
password_manager::PasswordStoreChange::REMOVE, *www_form));
EXPECT_CALL(mock_observer, OnLoginsChanged(list));
EXPECT_CALL(consumer, OnGetPasswordStoreResultsConstRef(IsEmpty()));
store_->GetLogins(*www_form, PasswordStore::ALLOW_PROMPT, &consumer);
base::MessageLoop::current()->Run();
EXPECT_TRUE(login_db()->GetAutofillableLogins(&all_forms));
EXPECT_EQ(0u, all_forms.size());
}
// Verify that Android app passwords can be stored, retrieved, and deleted.
// Regression test for http://crbug.com/455551
TEST_F(PasswordStoreMacTest, StoringAndRetrievingAndroidCredentials) {
PasswordForm form;
form.signon_realm = "android://[email protected]/";
form.username_value = base::UTF8ToUTF16("randomusername");
form.password_value = base::UTF8ToUTF16("password");
VerifyCredentialLifecycle(form);
}
// Verify that federated credentials can be stored, retrieved and deleted.
TEST_F(PasswordStoreMacTest, StoringAndRetrievingFederatedCredentials) {
PasswordForm form;
form.signon_realm = "android://[email protected]/";
form.federation_url = GURL(password_manager::kTestingFederationUrlSpec);
form.username_value = base::UTF8ToUTF16("randomusername");
form.password_value = base::UTF8ToUTF16(""); // No password.
VerifyCredentialLifecycle(form);
}
void CheckMigrationResult(PasswordStoreMac::MigrationResult expected_result,
PasswordStoreMac::MigrationResult result) {
EXPECT_EQ(expected_result, result);
QuitUIMessageLoop();
}
// Import the passwords from the Keychain to LoginDatabase.
TEST_F(PasswordStoreMacTest, ImportFromKeychain) {
PasswordForm form1;
form1.origin = GURL("http://accounts.google.com/LoginAuth");
form1.signon_realm = "http://accounts.google.com/";
form1.username_value = ASCIIToUTF16("my_username");
form1.password_value = ASCIIToUTF16("my_password");
PasswordForm form2;
form2.origin = GURL("http://facebook.com/Login");
form2.signon_realm = "http://facebook.com/";
form2.username_value = ASCIIToUTF16("my_username");
form2.password_value = ASCIIToUTF16("my_password");
PasswordForm blacklisted_form;
blacklisted_form.origin = GURL("http://badsite.com/Login");
blacklisted_form.signon_realm = "http://badsite.com/";
blacklisted_form.blacklisted_by_user = true;
store()->AddLogin(form1);
store()->AddLogin(form2);
store()->AddLogin(blacklisted_form);
FinishAsyncProcessing();
ASSERT_TRUE(base::PostTaskAndReplyWithResult(
thread_->task_runner().get(), FROM_HERE,
base::Bind(&PasswordStoreMac::ImportFromKeychain, store()),
base::Bind(&CheckMigrationResult, PasswordStoreMac::MIGRATION_OK)));
FinishAsyncProcessing();
// The password should be stored in the database by now.
ScopedVector<PasswordForm> matching_items;
EXPECT_TRUE(login_db()->GetLogins(form1, &matching_items));
ASSERT_EQ(1u, matching_items.size());
EXPECT_EQ(form1, *matching_items[0]);
EXPECT_TRUE(login_db()->GetLogins(form2, &matching_items));
ASSERT_EQ(1u, matching_items.size());
EXPECT_EQ(form2, *matching_items[0]);
EXPECT_TRUE(login_db()->GetLogins(blacklisted_form, &matching_items));
ASSERT_EQ(1u, matching_items.size());
EXPECT_EQ(blacklisted_form, *matching_items[0]);
// The passwords are encrypted using a key from the Keychain.
EXPECT_TRUE(histogram_tester_->GetHistogramSamplesSinceCreation(
"OSX.Keychain.Access")->TotalCount());
histogram_tester_.reset();
}
// Import a federated credential while the Keychain is locked.
TEST_F(PasswordStoreMacTest, ImportFederatedFromLockedKeychain) {
keychain()->set_locked(true);
PasswordForm form1;
form1.origin = GURL("http://example.com/Login");
form1.signon_realm = "http://example.com/";
form1.username_value = ASCIIToUTF16("my_username");
form1.federation_url = GURL("https://accounts.google.com/");
store()->AddLogin(form1);
FinishAsyncProcessing();
ASSERT_TRUE(base::PostTaskAndReplyWithResult(
thread_->task_runner().get(), FROM_HERE,
base::Bind(&PasswordStoreMac::ImportFromKeychain, store()),
base::Bind(&CheckMigrationResult, PasswordStoreMac::MIGRATION_OK)));
FinishAsyncProcessing();
ScopedVector<PasswordForm> matching_items;
EXPECT_TRUE(login_db()->GetLogins(form1, &matching_items));
ASSERT_EQ(1u, matching_items.size());
EXPECT_EQ(form1, *matching_items[0]);
}
// Try to import while the Keychain is locked but the encryption key had been
// read earlier.
TEST_F(PasswordStoreMacTest, ImportFromLockedKeychainError) {
PasswordForm form1;
form1.origin = GURL("http://accounts.google.com/LoginAuth");
form1.signon_realm = "http://accounts.google.com/";
form1.username_value = ASCIIToUTF16("my_username");
form1.password_value = ASCIIToUTF16("my_password");
store()->AddLogin(form1);
FinishAsyncProcessing();
// Add a second keychain item matching the Database entry.
PasswordForm form2 = form1;
form2.origin = GURL("http://accounts.google.com/Login");
form2.password_value = ASCIIToUTF16("1234");
MacKeychainPasswordFormAdapter adapter(keychain());
EXPECT_TRUE(adapter.AddPassword(form2));
keychain()->set_locked(true);
ASSERT_TRUE(base::PostTaskAndReplyWithResult(
thread_->task_runner().get(), FROM_HERE,
base::Bind(&PasswordStoreMac::ImportFromKeychain, store()),
base::Bind(&CheckMigrationResult, PasswordStoreMac::KEYCHAIN_BLOCKED)));
FinishAsyncProcessing();
ScopedVector<PasswordForm> matching_items;
EXPECT_TRUE(login_db()->GetLogins(form1, &matching_items));
ASSERT_EQ(1u, matching_items.size());
EXPECT_EQ(base::string16(), matching_items[0]->password_value);
histogram_tester_->ExpectUniqueSample(
"PasswordManager.KeychainMigration.NumPasswordsOnFailure", 1, 1);
histogram_tester_->ExpectUniqueSample(
"PasswordManager.KeychainMigration.NumFailedPasswords", 1, 1);
histogram_tester_->ExpectUniqueSample(
"PasswordManager.KeychainMigration.NumChromeOwnedInaccessiblePasswords",
2, 1);
// Don't test the encryption key access.
histogram_tester_.reset();
}
|
lihui7115/ChromiumGStreamerBackend
|
chrome/browser/password_manager/password_store_mac_unittest.cc
|
C++
|
bsd-3-clause
| 78,380 |
Markdown to presentation
========================
A simple tool for creating presentations from markdown files.
License
=======
md2p is licensed under the BSD3 license. This refers to the `src/Main.hs` and `output/js/present.js` files.
**Note:** any other content is not part of md2p and is not covered by this license.
Example
-------
- [Source](https://github.com/soupi/markdown-to-presentation/blob/gh-pages/input/Example.md)
- [Result](http://soupi.github.io/markdown-to-presentation/)
How To Use:
------
1. Create a Markdown file and separate slides with `---` (html's `<hr>` tag); see the example below
2. Use _md2p_ tool (haskell compiler and cabal needed to compile md2p) to create an HTML file from your Markdown and place it in the output folder
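A minimal input file might look like this (the slide titles and text are placeholders, not part of md2p):
```markdown
# First Slide

Some introductory text.

---

# Second Slide

- a bullet point
- another bullet point
```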
You can also use any other md2html converter. Just add the following to the HTML:
```html
<link rel="stylesheet" type="text/css" href="css/style.css">
<link rel="stylesheet" type="text/css" href="css/github.css">
<link rel="stylesheet" type="text/css" href="highlight/styles/solarized_light.css"> <!-- Or your preferable syntax highlight theme -->
<script src="js/jquery-1.11.0.min.js"></script>
<script src="highlight/highlight.pack.js"></script>
<script>hljs.initHighlightingOnLoad();</script>
<script src="js/present.js"></script>
```
Actually, you can omit everything except:
```html
<script src="js/jquery-1.11.0.min.js"></script>
<script src="js/present.js"></script>
```
but then styling is up to you.
How to Install md2p:
-------------------
Use cabal to download dependencies and install md2p:
```
cabal update && cabal install
```
Packages and Libraries used to create md2p:
------------------------------------------
- [markdown](http://hackage.haskell.org/package/markdown) - for markdown to html conversion
- [highlight.js](https://highlightjs.org/) - for syntax highlight
- [a slightly modified github.css](https://gist.github.com/andyferra/2554919) - for styling
|
soupi/markdown-to-presentation
|
README.md
|
Markdown
|
bsd-3-clause
| 1,911 |
/*
-- MAGMA (version 1.5.0-beta3) --
Univ. of Tennessee, Knoxville
Univ. of California, Berkeley
Univ. of Colorado, Denver
@date July 2014
@generated from magma_z_init.cpp normal z -> s, Fri Jul 18 17:34:30 2014
@author Hartwig Anzt
*/
#include <fstream>
#include <stdlib.h>
#include <string>
#include <sstream>
#include <iostream>
#include <ostream>
#include <assert.h>
#include <stdio.h>
#include "../include/magmasparse_s.h"
#include "../../include/magma.h"
#include "../include/mmio.h"
using namespace std;
/**
Purpose
-------
Initialize a magma_s_vector.
Arguments
---------
@param
x magma_s_vector
vector to initialize
@param
mem_loc magma_location_t
memory for vector
@param
num_rows magma_int_t
desired length of vector
@param
values float
entries in vector
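    Example (an illustrative sketch, not part of the original documentation),
    showing a typical call that allocates a CPU vector of length 100 with
    every entry set to zero:
    @code
    magma_s_vector x;
    magma_s_vinit( &x, Magma_CPU, 100, 0.0 );
    @endcode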
@ingroup magmasparse_saux
********************************************************************/
magma_int_t
magma_s_vinit( magma_s_vector *x,
magma_location_t mem_loc,
magma_int_t num_rows,
float values ){
x->memory_location = Magma_CPU;
x->num_rows = num_rows;
x->nnz = num_rows;
if( mem_loc == Magma_CPU ){
x->memory_location = Magma_CPU;
magma_smalloc_cpu( &x->val, num_rows );
if ( x->val == NULL )
return MAGMA_ERR_HOST_ALLOC;
for( magma_int_t i=0; i<num_rows; i++)
x->val[i] = values;
return MAGMA_SUCCESS;
}
else if( mem_loc == Magma_DEV ){
x->memory_location = Magma_DEV;
float *tmp;
magma_smalloc_cpu( &tmp, num_rows );
if ( tmp == NULL )
return MAGMA_ERR_HOST_ALLOC;
for( magma_int_t i=0; i<num_rows; i++)
tmp[i] = values;
if (MAGMA_SUCCESS != magma_smalloc( &x->val, x->num_rows))
return MAGMA_ERR_DEVICE_ALLOC;
// data transfer
magma_ssetvector( x->num_rows, tmp, 1, x->val, 1 );
magma_free_cpu(tmp);
return MAGMA_SUCCESS;
}
return MAGMA_SUCCESS;
}
|
EmergentOrder/magma
|
sparse-iter/control/magma_s_init.cpp
|
C++
|
bsd-3-clause
| 2,249 |
//To Test:http://localhost:8080/nbia-auth/services/v3/getProtectionGrpList?format=html
package gov.nih.nci.nbia.restAPI;
import gov.nih.nci.nbia.dao.TrialDataProvenanceDAO;
import gov.nih.nci.nbia.util.SpringApplicationContext;
import gov.nih.nci.security.SecurityServiceProvider;
import gov.nih.nci.security.UserProvisioningManager;
import gov.nih.nci.security.authorization.domainobjects.ProtectionGroup;
import gov.nih.nci.security.authorization.domainobjects.ProtectionElement;
import gov.nih.nci.security.authorization.domainobjects.Role;
import gov.nih.nci.security.dao.RoleSearchCriteria;
import gov.nih.nci.security.dao.SearchCriteria;
import gov.nih.nci.security.exceptions.CSConfigurationException;
import gov.nih.nci.security.exceptions.CSException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Path;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.springframework.dao.DataAccessException;
@Path("/v3/getProtectionGrpList")
public class V3_getProtectionGrpList extends getData{
private static final String[] columns={"pgName", "description", "dataSetName"};
public final static String TEXT_CSV = "text/csv";
@Context private HttpServletRequest httpRequest;
/**
	 * This method gets a list of protection group names.
	 *
	 * @return String - list of protection group names
*/
@GET
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON, MediaType.TEXT_HTML, TEXT_CSV})
public Response constructResponse(@QueryParam("format") String format) {
List<Object[]> data = null;
try {
UserProvisioningManager upm = getUpm();
java.util.List<ProtectionGroup> protectionGrpLst = upm.getProtectionGroups();
if ( protectionGrpLst != null) {
data = new ArrayList<Object []>();
for(ProtectionGroup pg : protectionGrpLst) {
List<ProtectionElement> pes = new ArrayList<ProtectionElement>(upm.getProtectionElements(pg.getProtectionGroupId().toString()));
for (ProtectionElement pe : pes) {
Object [] objs = {pg.getProtectionGroupName(),
pg.getProtectionGroupDescription(),
pe.getProtectionElementName()};
data.add(objs);
}
}
}
			else {
				data = new ArrayList<Object []>();
				Object [] objs = {"Warning: No Protection Group has been defined yet!", "NA", "NA"};
				data.add(objs);
			}
} catch (CSConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (CSException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return formatResponse(format, data, columns);
}
}
|
NCIP/national-biomedical-image-archive
|
software/nbia-api/src/gov/nih/nci/nbia/restAPI/V3_getProtectionGrpList.java
|
Java
|
bsd-3-clause
| 2,741 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_BROWSER_RENDERER_HOST_CLIPBOARD_MESSAGE_FILTER_H_
#define CONTENT_BROWSER_RENDERER_HOST_CLIPBOARD_MESSAGE_FILTER_H_
#include <string>
#include <vector>
#include "base/basictypes.h"
#include "content/public/browser/browser_message_filter.h"
#include "ui/base/clipboard/clipboard.h"
class GURL;
namespace content {
class ClipboardMessageFilter : public BrowserMessageFilter {
public:
ClipboardMessageFilter();
virtual void OverrideThreadForMessage(
const IPC::Message& message,
BrowserThread::ID* thread) OVERRIDE;
virtual bool OnMessageReceived(const IPC::Message& message,
bool* message_was_ok) OVERRIDE;
private:
virtual ~ClipboardMessageFilter();
void OnWriteObjectsAsync(const ui::Clipboard::ObjectMap& objects);
void OnWriteObjectsSync(const ui::Clipboard::ObjectMap& objects,
base::SharedMemoryHandle bitmap_handle);
void OnGetSequenceNumber(const ui::Clipboard::Buffer buffer,
uint64* sequence_number);
void OnIsFormatAvailable(const ui::Clipboard::FormatType& format,
ui::Clipboard::Buffer buffer,
bool* result);
void OnClear(ui::Clipboard::Buffer buffer);
void OnReadAvailableTypes(ui::Clipboard::Buffer buffer,
std::vector<string16>* types,
bool* contains_filenames);
void OnReadText(ui::Clipboard::Buffer buffer, string16* result);
void OnReadAsciiText(ui::Clipboard::Buffer buffer, std::string* result);
void OnReadHTML(ui::Clipboard::Buffer buffer, string16* markup, GURL* url,
uint32* fragment_start, uint32* fragment_end);
void OnReadRTF(ui::Clipboard::Buffer buffer, std::string* result);
void OnReadImage(ui::Clipboard::Buffer buffer, IPC::Message* reply_msg);
void OnReadImageReply(const SkBitmap& bitmap, IPC::Message* reply_msg);
void OnReadCustomData(ui::Clipboard::Buffer buffer,
const string16& type,
string16* result);
#if defined(OS_MACOSX)
void OnFindPboardWriteString(const string16& text);
#endif
// We have our own clipboard because we want to access the clipboard on the
// IO thread instead of forwarding (possibly synchronous) messages to the UI
// thread. This instance of the clipboard should be accessed only on the IO
// thread.
static ui::Clipboard* GetClipboard();
DISALLOW_COPY_AND_ASSIGN(ClipboardMessageFilter);
};
} // namespace content
#endif // CONTENT_BROWSER_RENDERER_HOST_CLIPBOARD_MESSAGE_FILTER_H_
|
junmin-zhu/chromium-rivertrail
|
content/browser/renderer_host/clipboard_message_filter.h
|
C
|
bsd-3-clause
| 2,770 |
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package containing the different outputs.
Each output type is defined inside a module.
"""
|
v-legoff/croissant
|
croissant/output/__init__.py
|
Python
|
bsd-3-clause
| 1,636 |
/*
* Copyright (c) 2013-2013, KNOPFLERFISH project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* - Neither the name of the KNOPFLERFISH project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.knopflerfish.service.repositorymanager;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceReference;
import org.osgi.service.repository.Repository;
public class RepositoryInfo implements Comparable<RepositoryInfo> {
final private long id;
final private int rank;
final ServiceReference<Repository> sr;
public RepositoryInfo(ServiceReference<Repository> sr) {
this.id = ((Long)sr.getProperty(Constants.SERVICE_ID)).longValue();
Object r = sr.getProperty(Constants.SERVICE_RANKING);
if (r != null && r instanceof Integer) {
this.rank = ((Integer)r).intValue();
} else {
this.rank = 0;
}
this.sr = sr;
}
public RepositoryInfo(RepositoryInfo old, int rank) {
this.id = old.id;
this.rank = rank;
this.sr = old.sr;
}
public long getId() {
return id;
}
public int getRank() {
return rank;
}
public Object getProperty(String prop) {
return sr.getProperty(prop);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (id ^ (id >>> 32));
return result;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null)
return false;
if (getClass() != o.getClass())
return false;
RepositoryInfo rio = (RepositoryInfo) o;
if (id != rio.id || rank != rio.rank)
return false;
return true;
}
@Override
public int compareTo(RepositoryInfo o) {
if (equals(o)) {
return 0;
}
if (rank != o.rank) {
return o.rank - rank;
} else {
return id < o.id ? -1 : 1;
}
}
public ServiceReference<Repository> getServiceReference() {
return sr;
}
@Override
public String toString() {
return "RepositoryInfo [id=" + id + ", rank=" + rank + "]";
}
}
|
knopflerfish/knopflerfish.org
|
osgi/bundles/repository/repositorymanager/src/org/knopflerfish/service/repositorymanager/RepositoryInfo.java
|
Java
|
bsd-3-clause
| 3,464 |
//------------------------------------------------------------------------------
// GB_Descriptor_get: get the status of a descriptor
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, Timothy A. Davis, (c) 2017-2018, All Rights Reserved.
// http://suitesparse.com See GraphBLAS/Doc/License.txt for license.
//------------------------------------------------------------------------------
// A descriptor modifies the behavior of a GraphBLAS operation.  In the
// current GraphBLAS spec, the following descriptor fields may be set.
// Descriptor field: Descriptor value:
// desc->out GxB_DEFAULT or GrB_REPLACE
// GrB_REPLACE means that the output matrix C is cleared just
// prior to writing results back into it, via C<Mask> = results. This
// descriptor does not affect how C is used to compute the results. If
// GxB_DEFAULT, then C is not cleared before doing C<Mask>=results.
// desc->mask GxB_DEFAULT or GrB_SCMP
// An optional 'write mask' defines how the results are to be written back
// into C. The boolean Mask matrix has the same size as C (Mask is
// typecasted to boolean if it has another type). If the Mask input to
// the GraphBLAS method is NULL, then implicitly Mask(i,j)=1 for all i and
// j. Let Z be the results to be written into C (the same dimension as
// C). If desc->mask is GxB_DEFAULT, and Mask(i,j)=1, then C(i,j) is
// over-written with Z(i,j). Otherwise, if Mask(i,j)=0 C(i,j) is left
// unmodified (it remains an implicit zero if it is so, or its value is
// unchanged if it has one). If desc->mask is GrB_SCMP, then the use of
// Mask is negated: Mask(i,j)=0 means that C(i,j) is overwritten with
// Z(i,j), and Mask(i,j)=1 means that C(i,j) is left unchanged.
// Writing results Z into C via the Mask is written as C<Mask>=Z in
// GraphBLAS notation.
// Note that it is the value of Mask(i,j) that determines how C(i,j) is
// overwritten. If the (i,j) entry is present in the Mask matrix data
// structure but has a numerical value of zero, then it is the same as if
// (i,j) is not present and thus implicitly zero. Both mean 'Mask(i,j)=0'
// in the description above of how the Mask works.
// desc->in0 and desc->in1 GxB_DEFAULT or GrB_TRAN
// A GrB_Matrix passed as an input parameter to GraphBLAS methods can
// optionally transpose them prior to using them. desc->in0 always refers
// to the first input to the method, and desc->in1 always refers to the
// second one.
// If the value of this descriptor is GxB_DEFAULT, then the matrix is used
// as-is. Otherwise, it is transposed first. That is, the results are
// the same as if the transpose of the matrix was passed to the method.
// desc->axb see GraphBLAS.h; can be:
// GrB_DEFAULT automatic selection
// GxB_AxB_GUSTAVSON gather-scatter saxpy method
// GxB_AxB_HEAP heap-based saxpy method
// GxB_AxB_DOT dot product
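// For illustration only (this example is not part of the original file): a
// caller typically builds such a descriptor through the public API, e.g.
//
//      GrB_Descriptor desc ;
//      GrB_Descriptor_new (&desc) ;
//      GrB_Descriptor_set (desc, GrB_OUTP, GrB_REPLACE) ; // clear C before C<Mask>=Z
//      GrB_Descriptor_set (desc, GrB_MASK, GrB_SCMP) ;    // use the complement of Mask
//      GrB_Descriptor_set (desc, GrB_INP0, GrB_TRAN) ;    // transpose the first input
//
// (the SuiteSparse-specific desc->axb field is set via the GxB descriptor
// interface declared in GraphBLAS.h), and the descriptor is then passed to an
// operation such as GrB_mxm.  GB_Descriptor_get below is the internal routine
// that unpacks those settings.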
#include "GB.h"
GrB_Info GB_Descriptor_get // get the contents of a descriptor
(
const GrB_Descriptor desc, // descriptor to query, may be NULL
bool *C_replace, // if true replace C before C<Mask>=Z
bool *Mask_comp, // if true use logical negation of Mask
bool *In0_transpose, // if true transpose first input
bool *In1_transpose, // if true transpose second input
GrB_Desc_Value *AxB_method, // method for C=A*B
GB_Context Context
)
{
//--------------------------------------------------------------------------
// check inputs
//--------------------------------------------------------------------------
// desc may be null, but if not NULL it must be initialized
GB_RETURN_IF_FAULTY (desc) ;
//--------------------------------------------------------------------------
// get the contents of the descriptor
//--------------------------------------------------------------------------
// default values if descriptor is NULL
GrB_Desc_Value C_desc = GxB_DEFAULT ;
GrB_Desc_Value Mask_desc = GxB_DEFAULT ;
GrB_Desc_Value In0_desc = GxB_DEFAULT ;
GrB_Desc_Value In1_desc = GxB_DEFAULT ;
GrB_Desc_Value AxB_desc = GxB_DEFAULT ;
// non-defaults descriptors
if (desc != NULL)
{
// get the contents
C_desc = desc->out ; // DEFAULT or REPLACE
Mask_desc = desc->mask ; // DEFAULT or SCMP
In0_desc = desc->in0 ; // DEFAULT or TRAN
In1_desc = desc->in1 ; // DEFAULT or TRAN
AxB_desc = desc->axb ; // DEFAULT, GUSTAVSON, HEAP, or DOT
}
// check for valid values of each descriptor field
if (!(C_desc == GxB_DEFAULT || C_desc == GrB_REPLACE) ||
!(Mask_desc == GxB_DEFAULT || Mask_desc == GrB_SCMP) ||
!(In0_desc == GxB_DEFAULT || In0_desc == GrB_TRAN) ||
!(In1_desc == GxB_DEFAULT || In1_desc == GrB_TRAN) ||
!(AxB_desc == GxB_DEFAULT || AxB_desc == GxB_AxB_GUSTAVSON ||
AxB_desc == GxB_AxB_DOT || AxB_desc == GxB_AxB_HEAP))
{
return (GB_ERROR (GrB_INVALID_OBJECT, (GB_LOG, "Descriptor invalid"))) ;
}
if (C_replace != NULL)
{
*C_replace = (C_desc == GrB_REPLACE) ;
}
if (Mask_comp != NULL)
{
*Mask_comp = (Mask_desc == GrB_SCMP) ;
}
if (In0_transpose != NULL)
{
*In0_transpose = (In0_desc == GrB_TRAN) ;
}
if (In1_transpose != NULL)
{
*In1_transpose = (In1_desc == GrB_TRAN) ;
}
if (AxB_method != NULL)
{
*AxB_method = AxB_desc ;
}
return (GrB_SUCCESS) ;
}
|
jlblancoc/suitesparse-metis-for-windows
|
SuiteSparse/GraphBLAS/Source/GB_Descriptor_get.c
|
C
|
bsd-3-clause
| 5,829 |
/* this file has been autogenerated by vtkNodeJsWrap */
/* editing this might prove futile */
#define VTK_WRAPPING_CXX
#define VTK_STREAMS_FWD_ONLY
#include <nan.h>
#include "vtkObjectWrap.h"
#include "vtkAbstractContextItemWrap.h"
#include "vtkObjectBaseWrap.h"
#include "vtkContext2DWrap.h"
#include "vtkContextSceneWrap.h"
#include "../../plus/plus.h"
using namespace v8;
extern Nan::Persistent<v8::Object> vtkNodeJsNoWrap;
Nan::Persistent<v8::FunctionTemplate> VtkAbstractContextItemWrap::ptpl;
VtkAbstractContextItemWrap::VtkAbstractContextItemWrap()
{ }
VtkAbstractContextItemWrap::VtkAbstractContextItemWrap(vtkSmartPointer<vtkAbstractContextItem> _native)
{ native = _native; }
VtkAbstractContextItemWrap::~VtkAbstractContextItemWrap()
{ }
void VtkAbstractContextItemWrap::Init(v8::Local<v8::Object> exports)
{
Nan::SetAccessor(exports, Nan::New("vtkAbstractContextItem").ToLocalChecked(), ConstructorGetter);
Nan::SetAccessor(exports, Nan::New("AbstractContextItem").ToLocalChecked(), ConstructorGetter);
}
void VtkAbstractContextItemWrap::ConstructorGetter(
v8::Local<v8::String> property,
const Nan::PropertyCallbackInfo<v8::Value>& info)
{
InitPtpl();
info.GetReturnValue().Set(Nan::New(ptpl)->GetFunction());
}
void VtkAbstractContextItemWrap::InitPtpl()
{
if (!ptpl.IsEmpty()) return;
v8::Local<v8::FunctionTemplate> tpl = Nan::New<v8::FunctionTemplate>(New);
VtkObjectWrap::InitPtpl( );
tpl->Inherit(Nan::New<FunctionTemplate>(VtkObjectWrap::ptpl));
tpl->SetClassName(Nan::New("VtkAbstractContextItemWrap").ToLocalChecked());
tpl->InstanceTemplate()->SetInternalFieldCount(1);
Nan::SetPrototypeMethod(tpl, "ClearItems", ClearItems);
Nan::SetPrototypeMethod(tpl, "clearItems", ClearItems);
Nan::SetPrototypeMethod(tpl, "GetInteractive", GetInteractive);
Nan::SetPrototypeMethod(tpl, "getInteractive", GetInteractive);
Nan::SetPrototypeMethod(tpl, "GetParent", GetParent);
Nan::SetPrototypeMethod(tpl, "getParent", GetParent);
Nan::SetPrototypeMethod(tpl, "GetScene", GetScene);
Nan::SetPrototypeMethod(tpl, "getScene", GetScene);
Nan::SetPrototypeMethod(tpl, "GetVisible", GetVisible);
Nan::SetPrototypeMethod(tpl, "getVisible", GetVisible);
Nan::SetPrototypeMethod(tpl, "NewInstance", NewInstance);
Nan::SetPrototypeMethod(tpl, "newInstance", NewInstance);
Nan::SetPrototypeMethod(tpl, "Paint", Paint);
Nan::SetPrototypeMethod(tpl, "paint", Paint);
Nan::SetPrototypeMethod(tpl, "PaintChildren", PaintChildren);
Nan::SetPrototypeMethod(tpl, "paintChildren", PaintChildren);
Nan::SetPrototypeMethod(tpl, "ReleaseGraphicsResources", ReleaseGraphicsResources);
Nan::SetPrototypeMethod(tpl, "releaseGraphicsResources", ReleaseGraphicsResources);
Nan::SetPrototypeMethod(tpl, "RemoveItem", RemoveItem);
Nan::SetPrototypeMethod(tpl, "removeItem", RemoveItem);
Nan::SetPrototypeMethod(tpl, "SafeDownCast", SafeDownCast);
Nan::SetPrototypeMethod(tpl, "safeDownCast", SafeDownCast);
Nan::SetPrototypeMethod(tpl, "SetInteractive", SetInteractive);
Nan::SetPrototypeMethod(tpl, "setInteractive", SetInteractive);
Nan::SetPrototypeMethod(tpl, "SetParent", SetParent);
Nan::SetPrototypeMethod(tpl, "setParent", SetParent);
Nan::SetPrototypeMethod(tpl, "SetScene", SetScene);
Nan::SetPrototypeMethod(tpl, "setScene", SetScene);
Nan::SetPrototypeMethod(tpl, "SetVisible", SetVisible);
Nan::SetPrototypeMethod(tpl, "setVisible", SetVisible);
Nan::SetPrototypeMethod(tpl, "Update", Update);
Nan::SetPrototypeMethod(tpl, "update", Update);
#ifdef VTK_NODE_PLUS_VTKABSTRACTCONTEXTITEMWRAP_INITPTPL
VTK_NODE_PLUS_VTKABSTRACTCONTEXTITEMWRAP_INITPTPL
#endif
ptpl.Reset( tpl );
}
void VtkAbstractContextItemWrap::New(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
if(!info.IsConstructCall())
{
Nan::ThrowError("Constructor not called in a construct call.");
return;
}
if(info.Length() == 0)
{
Nan::ThrowError("Cannot create instance of abstract class.");
return;
}
else
{
if(info[0]->ToObject() != vtkNodeJsNoWrap )
{
Nan::ThrowError("Parameter Error");
return;
}
}
info.GetReturnValue().Set(info.This());
}
void VtkAbstractContextItemWrap::ClearItems(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
native->ClearItems();
}
void VtkAbstractContextItemWrap::GetInteractive(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
bool r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->GetInteractive();
info.GetReturnValue().Set(Nan::New(r));
}
void VtkAbstractContextItemWrap::GetParent(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
vtkAbstractContextItem * r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->GetParent();
VtkAbstractContextItemWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkAbstractContextItemWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkAbstractContextItemWrap *w = new VtkAbstractContextItemWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
}
void VtkAbstractContextItemWrap::GetScene(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
vtkContextScene * r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->GetScene();
VtkContextSceneWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkContextSceneWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkContextSceneWrap *w = new VtkContextSceneWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
}
void VtkAbstractContextItemWrap::GetVisible(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
bool r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->GetVisible();
info.GetReturnValue().Set(Nan::New(r));
}
void VtkAbstractContextItemWrap::NewInstance(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
vtkAbstractContextItem * r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->NewInstance();
VtkAbstractContextItemWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkAbstractContextItemWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkAbstractContextItemWrap *w = new VtkAbstractContextItemWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
}
void VtkAbstractContextItemWrap::Paint(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkContext2DWrap::ptpl))->HasInstance(info[0]))
{
VtkContext2DWrap *a0 = ObjectWrap::Unwrap<VtkContext2DWrap>(info[0]->ToObject());
bool r;
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->Paint(
(vtkContext2D *) a0->native.GetPointer()
);
info.GetReturnValue().Set(Nan::New(r));
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkAbstractContextItemWrap::PaintChildren(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkContext2DWrap::ptpl))->HasInstance(info[0]))
{
VtkContext2DWrap *a0 = ObjectWrap::Unwrap<VtkContext2DWrap>(info[0]->ToObject());
bool r;
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->PaintChildren(
(vtkContext2D *) a0->native.GetPointer()
);
info.GetReturnValue().Set(Nan::New(r));
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkAbstractContextItemWrap::ReleaseGraphicsResources(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
native->ReleaseGraphicsResources();
}
void VtkAbstractContextItemWrap::RemoveItem(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkAbstractContextItemWrap::ptpl))->HasInstance(info[0]))
{
VtkAbstractContextItemWrap *a0 = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info[0]->ToObject());
bool r;
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->RemoveItem(
(vtkAbstractContextItem *) a0->native.GetPointer()
);
info.GetReturnValue().Set(Nan::New(r));
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkAbstractContextItemWrap::SafeDownCast(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkObjectBaseWrap::ptpl))->HasInstance(info[0]))
{
VtkObjectBaseWrap *a0 = ObjectWrap::Unwrap<VtkObjectBaseWrap>(info[0]->ToObject());
vtkAbstractContextItem * r;
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->SafeDownCast(
(vtkObjectBase *) a0->native.GetPointer()
);
VtkAbstractContextItemWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkAbstractContextItemWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkAbstractContextItemWrap *w = new VtkAbstractContextItemWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkAbstractContextItemWrap::SetInteractive(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsBoolean())
{
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
native->SetInteractive(
info[0]->BooleanValue()
);
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkAbstractContextItemWrap::SetParent(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkAbstractContextItemWrap::ptpl))->HasInstance(info[0]))
{
VtkAbstractContextItemWrap *a0 = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info[0]->ToObject());
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
native->SetParent(
(vtkAbstractContextItem *) a0->native.GetPointer()
);
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkAbstractContextItemWrap::SetScene(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkContextSceneWrap::ptpl))->HasInstance(info[0]))
{
VtkContextSceneWrap *a0 = ObjectWrap::Unwrap<VtkContextSceneWrap>(info[0]->ToObject());
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
native->SetScene(
(vtkContextScene *) a0->native.GetPointer()
);
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkAbstractContextItemWrap::SetVisible(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsBoolean())
{
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
native->SetVisible(
info[0]->BooleanValue()
);
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkAbstractContextItemWrap::Update(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
native->Update();
}
|
axkibe/node-vtk
|
wrappers/8.1.1/vtkAbstractContextItemWrap.cc
|
C++
|
bsd-3-clause
| 14,676 |
<!DOCTYPE html>
<html>
<head>
<title>PKIjs Mocha Test - OCSP Request Complex Example</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="stylesheet" href="https://cdn.rawgit.com/mochajs/mocha/2.2.5/mocha.css" />
</head>
<body>
<div id="mocha"></div>
<script src="https://cdn.rawgit.com/jquery/jquery/2.1.4/dist/jquery.min.js"></script>
<script src="https://cdn.rawgit.com/mochajs/mocha/2.2.5/mocha.js"></script>
<script src="https://cdn.rawgit.com/chaijs/chai/4.0.0-canary.1/chai.js"></script>
<script>mocha.setup('bdd'); window.assert = chai.assert;</script>
<script type="text/javascript" src="ocspRequestComplexExample.js"></script>
<script>
mocha.checkLeaks();
mocha.globals(['jQuery']);
mocha.run();
</script>
</body>
</html>
|
GlobalSign/PKI.js
|
test/browser/ocspRequestComplexExample.html
|
HTML
|
bsd-3-clause
| 933 |
package org.chasen.mecab.wrapper;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.junit.Test;
public class NodeIteratorTest {
@Test
public void threads() throws InterruptedException {
List<Thread> threads = new ArrayList<Thread>();
threads.add(new Thread(){
public void run(){
Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
for(MecabNode<Node, Path> node: t.iterator("本日は晴天なり")){
System.out.println(node.getSurface());
}
}
});
threads.add(new Thread(){
public void run(){
Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
for(MecabNode<Node, Path> node: t.iterator("本日は雨です")){
System.out.println(node.getSurface());
}
}
});
threads.add(new Thread(){
public void run(){
Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
for(MecabNode<Node, Path> node: t.iterator("昨日は曇りでした")){
System.out.println(node.getSurface());
}
}
});
for(Thread th: threads){
th.start();
}
for(Thread th: threads){
th.join();
}
}
@Test
public void executors() throws InterruptedException, ExecutionException {
class Hoge {
public void parse(String str){
Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
for(MecabNode<Node, Path> node: t.iterator(str)){
System.out.println(node.getSurface());
}
}
}
final Hoge hoge = new Hoge();
ExecutorService executors = Executors.newCachedThreadPool();
List<Future<?>> futures = new ArrayList<Future<?>>();
futures.add(executors.submit(new Callable<Void>(){
public Void call() throws Exception {
hoge.parse("本日は晴天なり");
return null;
}
}));
futures.add(executors.submit(new Callable<Void>(){
public Void call() throws Exception {
hoge.parse("本日は雨です");
return null;
}
}));
futures.add(executors.submit(new Callable<Void>(){
public Void call() throws Exception {
hoge.parse("昨日は曇りでした");
return null;
}
}));
for(Future<?> f: futures){
f.get();
}
}
@Test
public void executors_runnable() throws InterruptedException, ExecutionException {
class Hoge implements Runnable {
String str;
Hoge(String str){
this.str = str;
}
public void run(){
Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
for(MecabNode<Node, Path> node: t.iterator(str)){
System.out.println(node.getSurface());
}
}
}
ExecutorService executors = Executors.newCachedThreadPool();
List<Future<?>> futures = new ArrayList<Future<?>>();
futures.add(executors.submit(new Hoge("本日は晴天なり")));
futures.add(executors.submit(new Hoge("本日は雨です")));
futures.add(executors.submit(new Hoge("昨日は曇りでした")));
for(Future<?> f: futures){
f.get();
}
}
}
|
nowelium/jna-libmecab
|
test/org/chasen/mecab/wrapper/NodeIteratorTest.java
|
Java
|
bsd-3-clause
| 3,838 |
<html>
<body>
<p>Registration was requested for {{ email }}. Open the link below to confirm your e-mail address and continue registration.</p>
<a href="{{ url }}">{{ url }}</a>
</body>
</html>
|
triflesoft/django-application-talos
|
applications/talos/templates/talos/principal_registration/request_email_body.html
|
HTML
|
bsd-3-clause
| 195 |
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <iostream>
#include <string>
#include "parser.h"
#include "CLI/wrapper.h"
#include "Libraries/linenoise.h"
#include "CLI/interface.h"
#define HIST_FILENAME ".polyBobHistory"
int main(int argc, char **argv)
{
char* line;
unsigned int promptNb = 1;
char promptMsg[100];
srand(time(NULL));
printLogo();
/* Set the completion callback. This will be called every time the
* user uses the <tab> key. */
linenoiseSetCompletionCallback(completion);
/* Load history from file.*/
linenoiseHistoryLoad(HIST_FILENAME); /* Load the history at startup */
snprintf(promptMsg, 100, "%s[%d]: ", "\033[0m", promptNb);
while((line = linenoise(promptMsg)) != NULL)
{
linenoiseHistoryAdd(line); /* Add to the history. */
linenoiseHistorySave(HIST_FILENAME); /* Save the history on disk. */
/* Do something with the string. */
rmSuperscript(line);
if(line[0] == '/')
parseCommand(&(line[1]));
else if(!strcmp(line, "exit") || !strcmp(line, "quit") || (line[1] == 0 && (line[0] == 'e' || line[0] == 'q')))
break;
else if(line[0] != '\0')
{
simpleParserAPI(line);
}
snprintf(promptMsg, 100, "[%d]: ", ++promptNb);
}
finalProcessing();
return 0;
}
|
Taiki-San/Polybob
|
entrypoint.cpp
|
C++
|
bsd-3-clause
| 1,286 |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.10"/>
<title>BuildmLearn Store: org.buildmlearn.appstore.activities.HomeActivity Class Reference</title>
<link href="../../tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="../../jquery.js"></script>
<script type="text/javascript" src="../../dynsections.js"></script>
<link href="../../navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="../../resize.js"></script>
<script type="text/javascript" src="../../navtreedata.js"></script>
<script type="text/javascript" src="../../navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
$(window).load(resizeHeight);
</script>
<link href="../../search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="../../search/searchdata.js"></script>
<script type="text/javascript" src="../../search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { init_search(); });
</script>
<link href="../../doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectlogo"><img alt="Logo" src="../../ic_launcher.png"/></td>
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">BuildmLearn Store
 <span id="projectnumber">1.0.0.0</span>
</div>
<div id="projectbrief">An android app, which is a store for apps built using BuildmLearn ToolKit</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.10 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "../../search",false,'Search');
</script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="../../index.html"><span>Main Page</span></a></li>
<li><a href="../../namespaces.html"><span>Packages</span></a></li>
<li class="current"><a href="../../annotated.html"><span>Classes</span></a></li>
<li><a href="../../files.html"><span>Files</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="../../search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="../../search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="../../annotated.html"><span>Class List</span></a></li>
<li><a href="../../classes.html"><span>Class Index</span></a></li>
<li><a href="../../inherits.html"><span>Class Hierarchy</span></a></li>
<li><a href="../../functions.html"><span>Class Members</span></a></li>
</ul>
</div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html','../../');});
</script>
<div id="doc-content">
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="summary">
<a href="#pub-methods">Public Member Functions</a> |
<a href="#pub-static-methods">Static Public Member Functions</a> |
<a href="#pro-methods">Protected Member Functions</a> |
<a href="#pri-attribs">Private Attributes</a> |
<a href="#pri-static-attribs">Static Private Attributes</a> |
<a href="../../d9/dbd/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity-members.html">List of all members</a> </div>
<div class="headertitle">
<div class="title">org.buildmlearn.appstore.activities.HomeActivity Class Reference</div> </div>
</div><!--header-->
<div class="contents">
<p>This class is the Home Page, which has a viewpager to display tabs for Store section and My-Apps section.
<a href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#details">More...</a></p>
<div id="dynsection-0" onclick="return toggleVisibility(this)" class="dynheader closed" style="cursor:pointer;">
<img id="dynsection-0-trigger" src="../../closed.png" alt="+"/> Inheritance diagram for org.buildmlearn.appstore.activities.HomeActivity:</div>
<div id="dynsection-0-summary" class="dynsummary" style="display:block;">
</div>
<div id="dynsection-0-content" class="dyncontent" style="display:none;">
<div class="center"><img src="../../da/d6b/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity__inherit__graph.png" border="0" usemap="#org_8buildmlearn_8appstore_8activities_8_home_activity_inherit__map" alt="Inheritance graph"/></div>
<map name="org_8buildmlearn_8appstore_8activities_8_home_activity_inherit__map" id="org_8buildmlearn_8appstore_8activities_8_home_activity_inherit__map">
<area shape="rect" id="node2" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html" title="This class is the parent class to which almost all the activities extends to. " alt="" coords="5,5,228,361"/>
</map>
</div>
<div id="dynsection-1" onclick="return toggleVisibility(this)" class="dynheader closed" style="cursor:pointer;">
<img id="dynsection-1-trigger" src="../../closed.png" alt="+"/> Collaboration diagram for org.buildmlearn.appstore.activities.HomeActivity:</div>
<div id="dynsection-1-summary" class="dynsummary" style="display:block;">
</div>
<div id="dynsection-1-content" class="dyncontent" style="display:none;">
<div class="center"><img src="../../de/d49/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity__coll__graph.png" border="0" usemap="#org_8buildmlearn_8appstore_8activities_8_home_activity_coll__map" alt="Collaboration graph"/></div>
<map name="org_8buildmlearn_8appstore_8activities_8_home_activity_coll__map" id="org_8buildmlearn_8appstore_8activities_8_home_activity_coll__map">
<area shape="rect" id="node2" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html" title="This class is the parent class to which almost all the activities extends to. " alt="" coords="5,5,228,361"/>
</map>
</div>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:a7c7ce22b50eb6f7d92223be23f285191"><td class="memItemLeft" align="right" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#a7c7ce22b50eb6f7d92223be23f285191">onBackPressed</a> ()</td></tr>
<tr class="memdesc:a7c7ce22b50eb6f7d92223be23f285191"><td class="mdescLeft"> </td><td class="mdescRight">This method is automatically called when the user presses the back button on his mobile. <a href="#a7c7ce22b50eb6f7d92223be23f285191">More...</a><br /></td></tr>
<tr class="separator:a7c7ce22b50eb6f7d92223be23f285191"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="inherit_header pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Public Member Functions inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:a0d03e67c759108fd119b9e5ad2e48014 inherit pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">boolean </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a0d03e67c759108fd119b9e5ad2e48014">onCreateOptionsMenu</a> (Menu menu)</td></tr>
<tr class="memdesc:a0d03e67c759108fd119b9e5ad2e48014 inherit pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="mdescLeft"> </td><td class="mdescRight">This method creates menu items to be shown on the Action Bar. <a href="#a0d03e67c759108fd119b9e5ad2e48014">More...</a><br /></td></tr>
<tr class="separator:a0d03e67c759108fd119b9e5ad2e48014 inherit pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-static-methods"></a>
Static Public Member Functions</h2></td></tr>
<tr class="memitem:a104177f795c32e9b28838760f994dc5f"><td class="memItemLeft" align="right" valign="top">static void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#a104177f795c32e9b28838760f994dc5f">MyAppsView</a> ()</td></tr>
<tr class="memdesc:a104177f795c32e9b28838760f994dc5f"><td class="mdescLeft"> </td><td class="mdescRight">Set the current view to My-Apps section. <a href="#a104177f795c32e9b28838760f994dc5f">More...</a><br /></td></tr>
<tr class="separator:a104177f795c32e9b28838760f994dc5f"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="inherit_header pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Static Public Member Functions inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:a3819b15bd7eeb6b579ffc4ae12bb289b inherit pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a3819b15bd7eeb6b579ffc4ae12bb289b">clearSearch</a> ()</td></tr>
<tr class="memdesc:a3819b15bd7eeb6b579ffc4ae12bb289b inherit pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="mdescLeft"> </td><td class="mdescRight">It clears the text in the search tool and collapses the search tool. <a href="#a3819b15bd7eeb6b579ffc4ae12bb289b">More...</a><br /></td></tr>
<tr class="separator:a3819b15bd7eeb6b579ffc4ae12bb289b inherit pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pro-methods"></a>
Protected Member Functions</h2></td></tr>
<tr class="memitem:a1a3c72988991108cb3f2f70b345a1a0d"><td class="memItemLeft" align="right" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#a1a3c72988991108cb3f2f70b345a1a0d">onCreate</a> (Bundle savedInstanceState)</td></tr>
<tr class="memdesc:a1a3c72988991108cb3f2f70b345a1a0d"><td class="mdescLeft"> </td><td class="mdescRight">The method is executed first when the activity is created. <a href="#a1a3c72988991108cb3f2f70b345a1a0d">More...</a><br /></td></tr>
<tr class="separator:a1a3c72988991108cb3f2f70b345a1a0d"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="inherit_header pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Protected Member Functions inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:ac91b6839b81f831ca5accce5fb956f16 inherit pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#ac91b6839b81f831ca5accce5fb956f16">onCreate</a> (Bundle savedInstanceState)</td></tr>
<tr class="memdesc:ac91b6839b81f831ca5accce5fb956f16 inherit pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="mdescLeft"> </td><td class="mdescRight">The method is executed first when the activity is created. <a href="#ac91b6839b81f831ca5accce5fb956f16">More...</a><br /></td></tr>
<tr class="separator:ac91b6839b81f831ca5accce5fb956f16 inherit pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pri-attribs"></a>
Private Attributes</h2></td></tr>
<tr class="memitem:a53f19cef5fb3efb3ee0a9f85888e669e"><td class="memItemLeft" align="right" valign="top">final CharSequence[] </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#a53f19cef5fb3efb3ee0a9f85888e669e">TITLES</a> ={"Store","My <a class="el" href="../../d7/d09/classorg_1_1buildmlearn_1_1appstore_1_1models_1_1_apps.html">Apps</a>"}</td></tr>
<tr class="separator:a53f19cef5fb3efb3ee0a9f85888e669e"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:aef6edc2f21ae7abe6c149f93db9a2ffa"><td class="memItemLeft" align="right" valign="top">MaterialDialog </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#aef6edc2f21ae7abe6c149f93db9a2ffa">mAlertDialog</a></td></tr>
<tr class="separator:aef6edc2f21ae7abe6c149f93db9a2ffa"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pri-static-attribs"></a>
Static Private Attributes</h2></td></tr>
<tr class="memitem:ab762b13301ac55e4a33fbd31ec3a77da"><td class="memItemLeft" align="right" valign="top">static ViewPager </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#ab762b13301ac55e4a33fbd31ec3a77da">mPager</a></td></tr>
<tr class="separator:ab762b13301ac55e4a33fbd31ec3a77da"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="inherited"></a>
Additional Inherited Members</h2></td></tr>
<tr class="inherit_header pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Static Public Attributes inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:aa63ef9aa194cb5b06d2a8b22d32b03fc inherit pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static FrameLayout </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#aa63ef9aa194cb5b06d2a8b22d32b03fc">frameLayout</a></td></tr>
<tr class="separator:aa63ef9aa194cb5b06d2a8b22d32b03fc inherit pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a92e76cddf17afa5981f148028c476ee7 inherit pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static final List< <a class="el" href="../../d7/d09/classorg_1_1buildmlearn_1_1appstore_1_1models_1_1_apps.html">Apps</a> > </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a92e76cddf17afa5981f148028c476ee7">appList</a> = new ArrayList<>()</td></tr>
<tr class="separator:a92e76cddf17afa5981f148028c476ee7 inherit pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="inherit_header pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Static Package Attributes inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:a1b782826a8dd79a6b9a28232af4d4e1f inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static DrawerLayout </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a1b782826a8dd79a6b9a28232af4d4e1f">mDrawer</a></td></tr>
<tr class="separator:a1b782826a8dd79a6b9a28232af4d4e1f inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a8cd9a3fefc0d4e9b943c0ca4ef055f5d inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static int </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a8cd9a3fefc0d4e9b943c0ca4ef055f5d">mActive</a> =1</td></tr>
<tr class="separator:a8cd9a3fefc0d4e9b943c0ca4ef055f5d inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a37acfc524065531d4601db7f69291c63 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static String </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a37acfc524065531d4601db7f69291c63">searchQuery</a> =""</td></tr>
<tr class="separator:a37acfc524065531d4601db7f69291c63 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a5a00a6fabe2745f6e93c4c795a74b558 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static int </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a5a00a6fabe2745f6e93c4c795a74b558">mActiveSearchInterface</a> =0</td></tr>
<tr class="separator:a5a00a6fabe2745f6e93c4c795a74b558 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a3016ec45af2d5da1524d1341d0ac4c94 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static NavigationView </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a3016ec45af2d5da1524d1341d0ac4c94">navigationView</a></td></tr>
<tr class="separator:a3016ec45af2d5da1524d1341d0ac4c94 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a6491b85f1ab4a70a783da29fc081956a inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static boolean </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a6491b85f1ab4a70a783da29fc081956a">isDrawerOpened</a> =false</td></tr>
<tr class="separator:a6491b85f1ab4a70a783da29fc081956a inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>This class is the Home Page, which has a viewpager to display tabs for Store section and My-Apps section. </p>
</div><h2 class="groupheader">Member Function Documentation</h2>
<a class="anchor" id="a104177f795c32e9b28838760f994dc5f"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">static void org.buildmlearn.appstore.activities.HomeActivity.MyAppsView </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">static</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Set the current view to My-Apps section. </p>
<p>This method is helpful when the user chooses to open the app from the My-Apps section in the Settings Page. </p>
</div>
</div>
<a class="anchor" id="a7c7ce22b50eb6f7d92223be23f285191"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">void org.buildmlearn.appstore.activities.HomeActivity.onBackPressed </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</div><div class="memdoc">
<p>This method is automatically called when the user presses the back button on his mobile. </p>
<p>It closes the Navigation Drawer if it is open. Otherwise, it displays a popup to close the app. </p>
</div>
</div>
<a class="anchor" id="a1a3c72988991108cb3f2f70b345a1a0d"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void org.buildmlearn.appstore.activities.HomeActivity.onCreate </td>
<td>(</td>
<td class="paramtype">Bundle </td>
<td class="paramname"><em>savedInstanceState</em></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">protected</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>The method is executed first when the activity is created. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramname">savedInstanceState</td><td>The bundle stores all the related parameters, if it has to be used when resuming the app. </td></tr>
</table>
</dd>
</dl>
<p>When the page selection is changed, the search view should reset. The app list in the My-Apps section is also refreshed, just in case the user has installed any app from the Store section. </p><dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramname">position</td><td>0:Store Section; 1: My-Apps Section</td></tr>
</table>
</dd>
</dl>
</div>
</div>
<h2 class="groupheader">Member Data Documentation</h2>
<a class="anchor" id="aef6edc2f21ae7abe6c149f93db9a2ffa"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">MaterialDialog org.buildmlearn.appstore.activities.HomeActivity.mAlertDialog</td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">private</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
</div>
</div>
<a class="anchor" id="ab762b13301ac55e4a33fbd31ec3a77da"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">ViewPager org.buildmlearn.appstore.activities.HomeActivity.mPager</td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">static</span><span class="mlabel">private</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
</div>
</div>
<a class="anchor" id="a53f19cef5fb3efb3ee0a9f85888e669e"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">final CharSequence [] org.buildmlearn.appstore.activities.HomeActivity.TITLES ={"Store","My <a class="el" href="../../d7/d09/classorg_1_1buildmlearn_1_1appstore_1_1models_1_1_apps.html">Apps</a>"}</td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">private</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
</div>
</div>
<hr/>The documentation for this class was generated from the following file:<ul>
<li>C:/Users/Srujan/Documents/GitHub/BuildmLearn-Store/Android/source-code/AppStore/app/src/main/java/org/buildmlearn/appstore/activities/<a class="el" href="../../d1/d8f/_home_activity_8java.html">HomeActivity.java</a></li>
</ul>
</div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="navelem"><a class="el" href="../../db/d96/namespaceorg.html">org</a></li><li class="navelem"><a class="el" href="../../d7/d90/namespaceorg_1_1buildmlearn.html">buildmlearn</a></li><li class="navelem"><a class="el" href="../../d8/dcf/namespaceorg_1_1buildmlearn_1_1appstore.html">appstore</a></li><li class="navelem"><a class="el" href="../../d8/dbc/namespaceorg_1_1buildmlearn_1_1appstore_1_1activities.html">activities</a></li><li class="navelem"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html">HomeActivity</a></li>
<li class="footer">Generated on Sat Aug 15 2015 21:55:11 for BuildmLearn Store by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="../../doxygen.png" alt="doxygen"/></a> 1.8.10 </li>
</ul>
</div>
</body>
</html>
|
BuildmLearn/BuildmLearn-Store
|
Android/doc/Doxygen/d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html
|
HTML
|
bsd-3-clause
| 29,511 |
/* this file has been autogenerated by vtkNodeJsWrap */
/* editing this might prove futile */
#define VTK_WRAPPING_CXX
#define VTK_STREAMS_FWD_ONLY
#include <nan.h>
#include "vtkCollectionWrap.h"
#include "vtkRenderWindowCollectionWrap.h"
#include "vtkObjectBaseWrap.h"
#include "vtkRenderWindowWrap.h"
#include "../../plus/plus.h"
using namespace v8;
extern Nan::Persistent<v8::Object> vtkNodeJsNoWrap;
Nan::Persistent<v8::FunctionTemplate> VtkRenderWindowCollectionWrap::ptpl;
VtkRenderWindowCollectionWrap::VtkRenderWindowCollectionWrap()
{ }
VtkRenderWindowCollectionWrap::VtkRenderWindowCollectionWrap(vtkSmartPointer<vtkRenderWindowCollection> _native)
{ native = _native; }
VtkRenderWindowCollectionWrap::~VtkRenderWindowCollectionWrap()
{ }
void VtkRenderWindowCollectionWrap::Init(v8::Local<v8::Object> exports)
{
Nan::SetAccessor(exports, Nan::New("vtkRenderWindowCollection").ToLocalChecked(), ConstructorGetter);
Nan::SetAccessor(exports, Nan::New("RenderWindowCollection").ToLocalChecked(), ConstructorGetter);
}
void VtkRenderWindowCollectionWrap::ConstructorGetter(
v8::Local<v8::String> property,
const Nan::PropertyCallbackInfo<v8::Value>& info)
{
InitPtpl();
info.GetReturnValue().Set(Nan::New(ptpl)->GetFunction());
}
void VtkRenderWindowCollectionWrap::InitPtpl()
{
if (!ptpl.IsEmpty()) return;
v8::Local<v8::FunctionTemplate> tpl = Nan::New<v8::FunctionTemplate>(New);
VtkCollectionWrap::InitPtpl( );
tpl->Inherit(Nan::New<FunctionTemplate>(VtkCollectionWrap::ptpl));
tpl->SetClassName(Nan::New("VtkRenderWindowCollectionWrap").ToLocalChecked());
tpl->InstanceTemplate()->SetInternalFieldCount(1);
Nan::SetPrototypeMethod(tpl, "AddItem", AddItem);
Nan::SetPrototypeMethod(tpl, "addItem", AddItem);
Nan::SetPrototypeMethod(tpl, "GetNextItem", GetNextItem);
Nan::SetPrototypeMethod(tpl, "getNextItem", GetNextItem);
Nan::SetPrototypeMethod(tpl, "NewInstance", NewInstance);
Nan::SetPrototypeMethod(tpl, "newInstance", NewInstance);
Nan::SetPrototypeMethod(tpl, "SafeDownCast", SafeDownCast);
Nan::SetPrototypeMethod(tpl, "safeDownCast", SafeDownCast);
#ifdef VTK_NODE_PLUS_VTKRENDERWINDOWCOLLECTIONWRAP_INITPTPL
VTK_NODE_PLUS_VTKRENDERWINDOWCOLLECTIONWRAP_INITPTPL
#endif
ptpl.Reset( tpl );
}
void VtkRenderWindowCollectionWrap::New(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
if(!info.IsConstructCall())
{
Nan::ThrowError("Constructor not called in a construct call.");
return;
}
if(info.Length() == 0)
{
vtkSmartPointer<vtkRenderWindowCollection> native = vtkSmartPointer<vtkRenderWindowCollection>::New();
VtkRenderWindowCollectionWrap* obj = new VtkRenderWindowCollectionWrap(native);
obj->Wrap(info.This());
}
else
{
if(info[0]->ToObject() != vtkNodeJsNoWrap )
{
Nan::ThrowError("Parameter Error");
return;
}
}
info.GetReturnValue().Set(info.This());
}
void VtkRenderWindowCollectionWrap::AddItem(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkRenderWindowCollectionWrap *wrapper = ObjectWrap::Unwrap<VtkRenderWindowCollectionWrap>(info.Holder());
vtkRenderWindowCollection *native = (vtkRenderWindowCollection *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkRenderWindowWrap::ptpl))->HasInstance(info[0]))
{
VtkRenderWindowWrap *a0 = ObjectWrap::Unwrap<VtkRenderWindowWrap>(info[0]->ToObject());
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
native->AddItem(
(vtkRenderWindow *) a0->native.GetPointer()
);
return;
}
Nan::ThrowError("Parameter mismatch");
}
void VtkRenderWindowCollectionWrap::GetNextItem(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkRenderWindowCollectionWrap *wrapper = ObjectWrap::Unwrap<VtkRenderWindowCollectionWrap>(info.Holder());
vtkRenderWindowCollection *native = (vtkRenderWindowCollection *)wrapper->native.GetPointer();
vtkRenderWindow * r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->GetNextItem();
VtkRenderWindowWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkRenderWindowWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkRenderWindowWrap *w = new VtkRenderWindowWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
}
void VtkRenderWindowCollectionWrap::NewInstance(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkRenderWindowCollectionWrap *wrapper = ObjectWrap::Unwrap<VtkRenderWindowCollectionWrap>(info.Holder());
vtkRenderWindowCollection *native = (vtkRenderWindowCollection *)wrapper->native.GetPointer();
vtkRenderWindowCollection * r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->NewInstance();
VtkRenderWindowCollectionWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkRenderWindowCollectionWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkRenderWindowCollectionWrap *w = new VtkRenderWindowCollectionWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
}
void VtkRenderWindowCollectionWrap::SafeDownCast(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkRenderWindowCollectionWrap *wrapper = ObjectWrap::Unwrap<VtkRenderWindowCollectionWrap>(info.Holder());
vtkRenderWindowCollection *native = (vtkRenderWindowCollection *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkObjectBaseWrap::ptpl))->HasInstance(info[0]))
{
VtkObjectBaseWrap *a0 = ObjectWrap::Unwrap<VtkObjectBaseWrap>(info[0]->ToObject());
vtkRenderWindowCollection * r;
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->SafeDownCast(
(vtkObjectBase *) a0->native.GetPointer()
);
VtkRenderWindowCollectionWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkRenderWindowCollectionWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkRenderWindowCollectionWrap *w = new VtkRenderWindowCollectionWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
return;
}
Nan::ThrowError("Parameter mismatch");
}
|
axkibe/node-vtk
|
wrappers/8.1.1/vtkRenderWindowCollectionWrap.cc
|
C++
|
bsd-3-clause
| 6,447 |
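From JavaScript, the collection wrapper above can be exercised roughly as follows. The `'vtk'` module path is an assumption; `addItem` and `getNextItem` map directly onto the prototype methods registered in `InitPtpl`, while traversal-reset helpers such as `InitTraversal` would come from the inherited `vtkCollection` wrapper, which is not shown in this file.

```typescript
// Hypothetical sketch only — module name is assumed; error handling omitted.
const vtk = require('vtk');

const windows = new vtk.vtkRenderWindowCollection();
const win = new vtk.vtkRenderWindow();

windows.addItem(win);                // native->AddItem(vtkRenderWindow *)
const next = windows.getNextItem();  // returns a wrapped vtkRenderWindow object
console.log(next);
```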
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from collections import namedtuple
from blinkpy.common.net.results_fetcher import TestResultsFetcher
BuilderStep = namedtuple('BuilderStep', ['build', 'step_name'])
# TODO(qyearsley): To be consistent with other fake ("mock") classes, this
# could be changed so it's not a subclass of TestResultsFetcher.
class MockTestResultsFetcher(TestResultsFetcher):
def __init__(self):
super(MockTestResultsFetcher, self).__init__()
self._canned_results = {}
self._canned_retry_summary_json = {}
self._webdriver_results = {}
self.fetched_builds = []
self.fetched_webdriver_builds = []
self._layout_test_step_name = 'blink_web_tests (with patch)'
def set_results(self, build, results, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self._canned_results[step] = results
def fetch_results(self, build, full=False, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self.fetched_builds.append(step)
return self._canned_results.get(step)
def set_results_to_resultdb(self, build, results):
self._canned_results[build.build_id] = results
def fetch_results_from_resultdb(self, host, builds, predicate):
rv = []
for build in builds:
results = self._canned_results.get(build.build_id)
if results:
rv.extend(results)
return rv
def set_webdriver_test_results(self, build, m, results):
self._webdriver_results[(build, m)] = results
def fetch_webdriver_test_results(self, build, m):
self.fetched_webdriver_builds.append((build, m))
return self._webdriver_results.get((build, m))
def set_retry_sumary_json(self, build, content):
self._canned_retry_summary_json[build] = content
def fetch_retry_summary_json(self, build):
return self._canned_retry_summary_json.get(build)
def set_layout_test_step_name(self, name):
self._layout_test_step_name = name
def get_layout_test_step_name(self, build):
return self._layout_test_step_name
|
ric2b/Vivaldi-browser
|
chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher_mock.py
|
Python
|
bsd-3-clause
| 3,794 |
# -*- coding: utf-8 -*-
import access
import util
@auth.requires_login()
def index():
"""Produces a list of the feedback obtained for a given venue,
or for all venues."""
venue_id = request.args(0)
if venue_id == 'all':
q = (db.submission.user == get_user_email())
else:
q = ((db.submission.user == get_user_email())
& (db.submission.venue_id == venue_id))
db.submission.id.represent = lambda x, r: A(T('View'), _class='btn', _href=URL('submission', 'view_own_submission', args=['v', r.id]))
db.submission.id.label = T('Submission')
db.submission.id.readable = True
db.submission.venue_id.readable = True
grid = SQLFORM.grid(q,
fields=[db.submission.id, db.submission.venue_id,
db.submission.date_created, db.submission.date_updated, ],
csv=False, details=False, create=False, editable=False, deletable=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid)
@auth.requires_login()
def view_feedback():
"""Shows detailed feedback for a user in a venue.
This controller accepts various types of arguments:
* 's', submission_id
* 'u', venue_id, username
* 'v', venue_id (in which case, shows own submission to that venue)
"""
if len(request.args) == 0:
redirect(URL('default', 'index'))
if request.args(0) == 's':
# submission_id
n_args = 2
subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
username = subm.user
elif request.args(0) == 'v':
# venue_id
n_args = 2
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = get_user_email()
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
else:
# venue_id, username
n_args = 3
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = request.args(2) or redirect(URL('default', 'index'))
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
# Checks permissions.
props = db(db.user_properties.user == get_user_email()).select().first()
if props == None:
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
is_author = (username == get_user_email())
can_view_feedback = access.can_view_feedback(c, props) or is_author
if (not can_view_feedback):
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
if not (access.can_view_feedback(c, props) or datetime.utcnow() > c.rate_close_date):
session.flash = T('The ratings are not yet available.')
redirect(URL('feedback', 'index', args=['all']))
# Produces the link to edit the feedback.
edit_feedback_link = None
if subm is not None and access.can_observe(c, props):
edit_feedback_link = A(T('Edit feedback'), _class='btn',
_href=URL('submission', 'edit_feedback', args=[subm.id]))
# Produces the download link.
download_link = None
if subm is not None and c.allow_file_upload and subm.content is not None:
if is_author:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_author', args=[subm.id, subm.content]))
else:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_manager', args=[subm.id, subm.content]))
venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
# Submission link.
subm_link = None
if subm is not None and c.allow_link_submission:
subm_link = A(subm.link, _href=subm.link)
# Submission content and feedback.
subm_comment = None
subm_feedback = None
if subm is not None:
raw_subm_comment = keystore_read(subm.comment)
if raw_subm_comment is not None and len(raw_subm_comment) > 0:
subm_comment = MARKMIN(keystore_read(subm.comment))
raw_feedback = keystore_read(subm.feedback)
if raw_feedback is not None and len(raw_feedback) > 0:
subm_feedback = MARKMIN(raw_feedback)
# Display settings.
db.submission.percentile.readable = True
db.submission.comment.readable = True
db.submission.feedback.readable = True
if access.can_observe(c, props):
db.submission.quality.readable = True
db.submission.error.readable = True
# Reads the grade information.
submission_grade = submission_percentile = None
review_grade = review_percentile = user_reputation = None
final_grade = final_percentile = None
assigned_grade = None
if c.grades_released:
grade_info = db((db.grades.user == username) & (db.grades.venue_id == c.id)).select().first()
if grade_info is not None:
submission_grade = represent_quality(grade_info.submission_grade, None)
submission_percentile = represent_percentage(grade_info.submission_percentile, None)
review_grade = represent_quality_10(grade_info.accuracy, None)
review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
user_reputation = represent_01_as_percentage(grade_info.reputation, None)
final_grade = represent_quality(grade_info.grade, None)
final_percentile = represent_percentage(grade_info.percentile, None)
assigned_grade = represent_quality(grade_info.assigned_grade, None)
# Makes a grid of comments.
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.rejected.readable = True
db.task.helpfulness.readable = db.task.helpfulness.writable = True
# Prevent editing the comments; the only thing editable should be the "is bogus" field.
db.task.comments.writable = False
db.task.comments.readable = True
ranking_link = None
if access.can_observe(c, props):
db.task.user.readable = True
db.task.completed_date.readable = True
links = [
dict(header=T('Review details'), body= lambda r:
A(T('View'), _class='btn', _href=URL('ranking', 'view_comparison', args=[r.id]))),
]
details = False
if subm is not None:
ranking_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_submission', args=[subm.id]))
reviews_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_user', args=[username, c.id]))
db.task.user.represent = lambda v, r: A(v, _href=URL('ranking', 'view_comparisons_given_user',
args=[v, c.id], user_signature=True))
else:
user_reputation = None
links = [
dict(header=T('Review feedback'), body = lambda r:
A(T('Give feedback'), _class='btn',
_href=URL('feedback', 'reply_to_review', args=[r.id], user_signature=True))),
]
details = False
ranking_link = None
reviews_link = None
if subm is not None:
q = ((db.task.submission_id == subm.id) & (db.task.is_completed == True))
# q = (db.task.submission_id == subm.id)
else:
q = (db.task.id == -1)
grid = SQLFORM.grid(q,
fields=[db.task.id, db.task.user, db.task.rejected, db.task.comments, db.task.helpfulness, ],
details = details,
csv=False, create=False, editable=False, deletable=False, searchable=False,
links=links,
args=request.args[:n_args],
maxtextlength=24,
)
return dict(subm=subm, download_link=download_link, subm_link=subm_link, username=username,
subm_comment=subm_comment, subm_feedback=subm_feedback,
edit_feedback_link=edit_feedback_link,
is_admin=is_user_admin(),
submission_grade=submission_grade, submission_percentile=submission_percentile,
review_grade=review_grade, review_percentile=review_percentile,
user_reputation=user_reputation,
final_grade=final_grade, final_percentile=final_percentile,
assigned_grade=assigned_grade,
venue_link=venue_link, grid=grid, ranking_link=ranking_link,
reviews_link=reviews_link)
@auth.requires_signature()
def reply_to_review():
t = db.task(request.args(0)) or redirect(URL('default', 'index'))
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.comments.readable = False
db.task.helpfulness.readable = db.task.helpfulness.writable = True
db.task.feedback.readable = db.task.feedback.writable = True
form = SQLFORM(db.task, record=t)
form.vars.feedback = keystore_read(t.feedback)
if form.process(onvalidation=validate_review_feedback(t)).accepted:
session.flash = T('Updated.')
redirect(URL('feedback', 'view_feedback', args=['s', t.submission_id]))
link_to_submission = A(T('View submission'), _href=URL('submission', 'view_own_submission', args=['v', t.submission_id]))
review_comments = MARKMIN(keystore_read(t.comments))
return dict(form=form, link_to_submission=link_to_submission, review_comments=review_comments)
def validate_review_feedback(t):
def f(form):
if not form.errors:
feedback_id = keystore_update(t.feedback, form.vars.feedback)
form.vars.feedback = feedback_id
return f
@auth.requires_login()
def view_my_reviews():
"""This controller displays the reviews a user has written for a venue, along with
the feedback they received."""
c = db.venue(request.args(0)) or redirect(URL('rating', 'review_index'))
link_to_venue = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
link_to_eval = A(T('My evaluation in this venue'), _class='btn',
_href=URL('feedback', 'view_feedback', args=['v', c.id]))
q = ((db.task.user == get_user_email()) & (db.task.venue_id == c.id))
db.task.rejected.readable = True
db.task.helpfulness.readable = True
db.task.comments.readable = True
db.task.feedback.readable = True
# To prevent chopping
db.task.submission_name.represent = represent_text_field
grid = SQLFORM.grid(q,
fields=[db.task.submission_name, db.task.rejected, db.task.helpfulness],
details=True,
editable=False, deletable=False, create=False, searchable=False,
csv=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid, link_to_venue=link_to_venue, link_to_eval=link_to_eval)
|
lucadealfaro/crowdranker
|
controllers/feedback.py
|
Python
|
bsd-3-clause
| 10,966 |
<?php
use yii\helpers\Html;
use yii\grid\GridView;
/* @var $this yii\web\View */
/* @var $searchModel app\models\search\UserSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */
$this->title = 'Users';
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="user-index">
<h1><?= Html::encode($this->title) ?></h1>
<?php // echo $this->render('_search', ['model' => $searchModel]); ?>
<p>
<?= Html::a('Create User', ['create'], ['class' => 'btn btn-success']) ?>
</p>
<?= GridView::widget([
'dataProvider' => $dataProvider,
'filterModel' => $searchModel,
'columns' => [
['class' => 'yii\grid\SerialColumn'],
'id',
'username',
'password',
'fullname',
'is_seller',
// 'lat',
// 'lng',
// 'category_id',
// 'description:ntext',
['class' => 'yii\grid\ActionColumn'],
],
]); ?>
</div>
|
vincentsthe/market-on
|
views/user/index.php
|
PHP
|
bsd-3-clause
| 999 |
//------------------------------------------------------------------------------
// GB_AxB: hard-coded C=A*B and C<M>=A*B
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, Timothy A. Davis, (c) 2017-2018, All Rights Reserved.
// http://suitesparse.com See GraphBLAS/Doc/License.txt for license.
// If this filename has a double underscore in its name ("__") then it has been
// automatically constructed from Generator/GB_AxB.c, via the Source/axb*.m
// scripts, and should not be editted. Edit the original source file instead.
//------------------------------------------------------------------------------
#include "GB.h"
#ifndef GBCOMPACT
#include "GB_heap.h"
#include "GB_AxB__semirings.h"
// The C=A*B semiring is defined by the following types and operators:
// A*B function (Gustavson): GB_AgusB__times_isgt_int32
// A'*B function (dot): GB_AdotB__times_isgt_int32
// A*B function (heap): GB_AheapB__times_isgt_int32
// Z type: int32_t (the type of C)
// X type: int32_t (the type of x for z=mult(x,y))
// Y type: int32_t (the type of y for z=mult(x,y))
// handle flipxy: 0 (0 if mult(x,y) is commutative, 1 otherwise)
// Identity: 1 (where cij *= identity does not change cij)
// Multiply: z = x > y
// Add: cij *= z
#define GB_XTYPE \
int32_t
#define GB_YTYPE \
int32_t
#define GB_HANDLE_FLIPXY \
0
#define GB_MULTOP(z,x,y) \
z = x > y
//------------------------------------------------------------------------------
// C<M>=A*B and C=A*B: gather/scatter saxpy-based method (Gustavson)
//------------------------------------------------------------------------------
#define GB_IDENTITY \
1
// x [i] = y
#define GB_COPY_SCALAR_TO_ARRAY(x,i,y,s) \
x [i] = y ;
// x = y [i]
#define GB_COPY_ARRAY_TO_SCALAR(x,y,i,s) \
GB_btype x = y [i] ;
// x [i] = y [i]
#define GB_COPY_ARRAY_TO_ARRAY(x,i,y,j,s) \
x [i] = y [j] ;
// mult-add operation (no mask)
#define GB_MULTADD_NOMASK \
{ \
/* Sauna_Work [i] += A(i,k) * B(k,j) */ \
GB_atype aik = Ax [pA] ; \
int32_t t ; \
GB_MULTIPLY (t, aik, bkj) ; \
Sauna_Work [i] *= t ; \
}
// mult-add operation (with mask)
#define GB_MULTADD_WITH_MASK \
{ \
/* Sauna_Work [i] += A(i,k) * B(k,j) */ \
GB_atype aik = Ax [pA] ; \
int32_t t ; \
GB_MULTIPLY (t, aik, bkj) ; \
if (mark == hiwater) \
{ \
/* first time C(i,j) seen */ \
Sauna_Mark [i] = hiwater + 1 ; \
Sauna_Work [i] = t ; \
} \
else \
{ \
/* C(i,j) seen before, update it */ \
Sauna_Work [i] *= t ; \
} \
}
GrB_Info GB_AgusB__times_isgt_int32
(
GrB_Matrix C,
const GrB_Matrix M,
const GrB_Matrix A,
const GrB_Matrix B,
bool flipxy, // if true, A and B have been swapped
GB_Sauna Sauna, // sparse accumulator
GB_Context Context
)
{
int32_t *restrict Sauna_Work = Sauna->Sauna_Work ; // size C->vlen*zsize
int32_t *restrict Cx = C->x ;
GrB_Info info = GrB_SUCCESS ;
#include "GB_AxB_Gustavson_flipxy.c"
return (info) ;
}
//------------------------------------------------------------------------------
// C<M>=A'*B or C=A'*B: dot product
//------------------------------------------------------------------------------
// get A(k,i)
#define GB_DOT_GETA(pA) \
GB_atype aki = Ax [pA] ;
// get B(k,j)
#define GB_DOT_GETB(pB) \
GB_btype bkj = Bx [pB] ;
// t = aki*bkj
#define GB_DOT_MULT(bkj) \
int32_t t ; \
GB_MULTIPLY (t, aki, bkj) ;
// cij += t
#define GB_DOT_ADD \
cij *= t ;
// cij = t
#define GB_DOT_COPY \
cij = t ;
// cij is not a pointer but a scalar; nothing to do
#define GB_DOT_REACQUIRE ;
// clear cij
#define GB_DOT_CLEAR \
cij = 1 ;
// save the value of C(i,j)
#define GB_DOT_SAVE \
Cx [cnz] = cij ;
#define GB_DOT_WORK_TYPE \
GB_btype
#define GB_DOT_WORK(k) Work [k]
// Work [k] = Bx [pB]
#define GB_DOT_SCATTER \
Work [k] = Bx [pB] ;
GrB_Info GB_AdotB__times_isgt_int32
(
GrB_Matrix *Chandle,
const GrB_Matrix M,
const GrB_Matrix A,
const GrB_Matrix B,
bool flipxy, // if true, A and B have been swapped
GB_Context Context
)
{
GrB_Matrix C = (*Chandle) ;
int32_t *restrict Cx = C->x ;
int32_t cij ;
GrB_Info info = GrB_SUCCESS ;
size_t bkj_size = B->type->size ; // no typecasting here
#include "GB_AxB_dot_flipxy.c"
return (info) ;
}
//------------------------------------------------------------------------------
// C<M>=A*B and C=A*B: heap saxpy-based method
//------------------------------------------------------------------------------
#define GB_CIJ_GETB(pB) \
GB_btype bkj = Bx [pB] ;
// C(i,j) = A(i,k) * bkj
#define GB_CIJ_MULT(pA) \
{ \
GB_atype aik = Ax [pA] ; \
GB_MULTIPLY (cij, aik, bkj) ; \
}
// C(i,j) += A(i,k) * B(k,j)
#define GB_CIJ_MULTADD(pA,pB) \
{ \
GB_atype aik = Ax [pA] ; \
GB_btype bkj = Bx [pB] ; \
int32_t t ; \
GB_MULTIPLY (t, aik, bkj) ; \
cij *= t ; \
}
// cij is not a pointer but a scalar; nothing to do
#define GB_CIJ_REACQUIRE ;
// cij = identity
#define GB_CIJ_CLEAR \
cij = 1 ;
// save the value of C(i,j)
#define GB_CIJ_SAVE \
Cx [cnz] = cij ;
GrB_Info GB_AheapB__times_isgt_int32
(
GrB_Matrix *Chandle,
const GrB_Matrix M,
const GrB_Matrix A,
const GrB_Matrix B,
bool flipxy, // if true, A and B have been swapped
int64_t *restrict List,
GB_pointer_pair *restrict pA_pair,
GB_Element *restrict Heap,
const int64_t bjnz_max,
GB_Context Context
)
{
GrB_Matrix C = (*Chandle) ;
int32_t *restrict Cx = C->x ;
int32_t cij ;
int64_t cvlen = C->vlen ;
GrB_Info info = GrB_SUCCESS ;
GB_CIJ_CLEAR ;
#include "GB_AxB_heap_flipxy.c"
return (info) ;
}
//------------------------------------------------------------------------------
// clear macro definitions
//------------------------------------------------------------------------------
#undef GB_XTYPE
#undef GB_YTYPE
#undef GB_HANDLE_FLIPXY
#undef GB_MULTOP
#undef GB_IDENTITY
#undef GB_COPY_SCALAR_TO_ARRAY
#undef GB_COPY_ARRAY_TO_SCALAR
#undef GB_COPY_ARRAY_TO_ARRAY
#undef GB_MULTADD_NOMASK
#undef GB_MULTADD_WITH_MASK
#undef GB_DOT_GETA
#undef GB_DOT_GETB
#undef GB_DOT_MULT
#undef GB_DOT_ADD
#undef GB_DOT_COPY
#undef GB_DOT_REACQUIRE
#undef GB_DOT_CLEAR
#undef GB_DOT_SAVE
#undef GB_DOT_WORK_TYPE
#undef GB_DOT_WORK
#undef GB_DOT_SCATTER
#undef GB_CIJ_GETB
#undef GB_CIJ_MULT
#undef GB_CIJ_MULTADD
#undef GB_CIJ_REACQUIRE
#undef GB_CIJ_CLEAR
#undef GB_CIJ_SAVE
#undef GB_MULTIPLY
#endif
|
jlblancoc/suitesparse-metis-for-windows
|
SuiteSparse/GraphBLAS/Source/Generated/GB_AxB__times_isgt_int32.c
|
C
|
bsd-3-clause
| 8,225 |
from __future__ import print_function
import shutil
import os, sys
import time
import logging
from .loaders import PythonLoader, YAMLLoader
from .bundle import get_all_bundle_files
from .exceptions import BuildError
from .updater import TimestampUpdater
from .merge import MemoryHunk
from .version import get_manifest
from .cache import FilesystemCache
from .utils import set, StringIO
__all__ = ('CommandError', 'CommandLineEnvironment', 'main')
# logging has WARNING as default level, for the CLI we want INFO. Set this
# as early as possible, so that user customizations will not be overwritten.
logging.getLogger('webassets.script').setLevel(logging.INFO)
class CommandError(Exception):
pass
class Command(object):
"""Base-class for a command used by :class:`CommandLineEnvironment`.
Each command being a class opens up certain possibilities with respect to
subclassing and customizing the default CLI.
"""
def __init__(self, cmd_env):
self.cmd = cmd_env
def __getattr__(self, name):
# Make stuff from cmd environment easier to access
return getattr(self.cmd, name)
def __call__(self, *args, **kwargs):
raise NotImplementedError()
class BuildCommand(Command):
def __call__(self, bundles=None, output=None, directory=None, no_cache=None,
manifest=None, production=None):
"""Build assets.
``bundles``
A list of bundle names. If given, only this list of bundles
should be built.
``output``
List of (bundle, filename) 2-tuples. If given, only these
bundles will be built, using the custom output filenames.
Cannot be used with ``bundles``.
``directory``
Custom output directory to use for the bundles. The original
basenames defined in the bundle ``output`` attribute will be
used. If the ``output`` of the bundles are pointing to different
directories, they will be offset by their common prefix.
Cannot be used with ``output``.
``no_cache``
If set, a cache (if one is configured) will not be used.
``manifest``
If set, the given manifest instance will be used, instead of
any that might have been configured in the Environment. The value
passed will be resolved through ``get_manifest()``. If this fails,
a file-based manifest will be used using the given value as the
filename.
``production``
            If set to ``True``, then :attr:`Environment.debug` will forcibly
be disabled (set to ``False``) during the build.
"""
# Validate arguments
if bundles and output:
raise CommandError(
'When specifying explicit output filenames you must '
'do so for all bundles you want to build.')
if directory and output:
raise CommandError('A custom output directory cannot be '
'combined with explicit output filenames '
'for individual bundles.')
if production:
# TODO: Reset again (refactor commands to be classes)
self.environment.debug = False
# TODO: Oh how nice it would be to use the future options stack.
if manifest is not None:
try:
manifest = get_manifest(manifest, env=self.environment)
except ValueError:
manifest = get_manifest(
# abspath() is important, or this will be considered
# relative to Environment.directory.
"file:%s" % os.path.abspath(manifest),
env=self.environment)
self.environment.manifest = manifest
# Use output as a dict.
if output:
output = dict(output)
# Validate bundle names
bundle_names = bundles if bundles else (output.keys() if output else [])
for name in bundle_names:
if not name in self.environment:
raise CommandError(
'I do not know a bundle name named "%s".' % name)
# Make a list of bundles to build, and the filename to write to.
if bundle_names:
# TODO: It's not ok to use an internal property here.
bundles = [(n,b) for n, b in self.environment._named_bundles.items()
if n in bundle_names]
else:
# Includes unnamed bundles as well.
bundles = [(None, b) for b in self.environment]
# Determine common prefix for use with ``directory`` option.
if directory:
prefix = os.path.commonprefix(
[os.path.normpath(b.resolve_output())
for _, b in bundles if b.output])
# dirname() gives the right value for a single file.
prefix = os.path.dirname(prefix)
to_build = []
for name, bundle in bundles:
# TODO: We really should support this. This error here
# is just in place of a less understandable error that would
# otherwise occur.
if bundle.is_container and directory:
raise CommandError(
'A custom output directory cannot currently be '
'used with container bundles.')
# Determine which filename to use, if not the default.
overwrite_filename = None
if output:
overwrite_filename = output[name]
elif directory:
offset = os.path.normpath(
bundle.resolve_output())[len(prefix)+1:]
overwrite_filename = os.path.join(directory, offset)
to_build.append((bundle, overwrite_filename, name,))
# Build.
built = []
for bundle, overwrite_filename, name in to_build:
if name:
                # A name is not necessarily available if the bundle was
                # registered without one.
self.log.info("Building bundle: %s (to %s)" % (
name, overwrite_filename or bundle.output))
else:
self.log.info("Building bundle: %s" % bundle.output)
try:
if not overwrite_filename:
with bundle.bind(self.environment):
bundle.build(force=True, disable_cache=no_cache)
else:
# TODO: Rethink how we deal with container bundles here.
# As it currently stands, we write all child bundles
# to the target output, merged (which is also why we
# create and force writing to a StringIO instead of just
# using the ``Hunk`` objects that build() would return
                    # anyway).
output = StringIO()
with bundle.bind(self.environment):
bundle.build(force=True, output=output,
disable_cache=no_cache)
if directory:
# Only auto-create directories in this mode.
output_dir = os.path.dirname(overwrite_filename)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
MemoryHunk(output.getvalue()).save(overwrite_filename)
built.append(bundle)
except BuildError as e:
self.log.error("Failed, error was: %s" % e)
if len(built):
self.event_handlers['post_build']()
if len(built) != len(to_build):
return 2
class WatchCommand(Command):
def __call__(self, loop=None):
"""Watch assets for changes.
``loop``
A callback, taking no arguments, to be called once every loop
iteration. Can be useful to integrate the command with other code.
            If not specified, the loop will call ``time.sleep()``.
"""
# TODO: This should probably also restart when the code changes.
mtimes = {}
try:
# Before starting to watch for changes, also recognize changes
# made while we did not run, and apply those immediately.
for bundle in self.environment:
print('Bringing up to date: %s' % bundle.output)
bundle.build(force=False)
self.log.info("Watching %d bundles for changes..." %
len(self.environment))
while True:
changed_bundles = self.check_for_changes(mtimes)
built = []
for bundle in changed_bundles:
print("Building bundle: %s ..." % bundle.output, end=' ')
sys.stdout.flush()
try:
bundle.build(force=True)
built.append(bundle)
except BuildError as e:
print("")
print("Failed: %s" % e)
else:
print("done")
if len(built):
self.event_handlers['post_build']()
do_end = loop() if loop else time.sleep(0.1)
if do_end:
break
except KeyboardInterrupt:
pass
def check_for_changes(self, mtimes):
# Do not update original mtimes dict right away, so that we detect
# all bundle changes if a file is in multiple bundles.
_new_mtimes = mtimes.copy()
changed_bundles = set()
# TODO: An optimization was lost here, skipping a bundle once
# a single file has been found to have changed. Bring back.
for filename, bundles_to_update in self.yield_files_to_watch():
stat = os.stat(filename)
mtime = stat.st_mtime
if sys.platform == "win32":
mtime -= stat.st_ctime
if mtimes.get(filename, mtime) != mtime:
if callable(bundles_to_update):
# Hook for when file has changed
try:
bundles_to_update = bundles_to_update()
except EnvironmentError:
# EnvironmentError is what the hooks is allowed to
# raise for a temporary problem, like an invalid config
import traceback
traceback.print_exc()
# Don't update anything, wait for another change
bundles_to_update = set()
if bundles_to_update is True:
# Indicates all bundles should be rebuilt for the change
bundles_to_update = set(self.environment)
changed_bundles |= bundles_to_update
_new_mtimes[filename] = mtime
_new_mtimes[filename] = mtime
mtimes.update(_new_mtimes)
return changed_bundles
def yield_files_to_watch(self):
for bundle in self.environment:
for filename in get_all_bundle_files(bundle):
yield filename, set([bundle])
class CleanCommand(Command):
def __call__(self):
"""Delete generated assets.
"""
self.log.info('Cleaning generated assets...')
for bundle in self.environment:
if not bundle.output:
continue
file_path = bundle.resolve_output(self.environment)
if os.path.exists(file_path):
os.unlink(file_path)
self.log.info("Deleted asset: %s" % bundle.output)
if isinstance(self.environment.cache, FilesystemCache):
shutil.rmtree(self.environment.cache.directory)
class CheckCommand(Command):
def __call__(self):
"""Check to see if assets need to be rebuilt.
A non-zero exit status will be returned if any of the input files are
newer (based on mtime) than their output file. This is intended to be
used in pre-commit hooks.
"""
needsupdate = False
updater = self.environment.updater
if not updater:
self.log.debug('no updater configured, using TimestampUpdater')
updater = TimestampUpdater()
for bundle in self.environment:
self.log.info('Checking asset: %s', bundle.output)
if updater.needs_rebuild(bundle, self.environment):
self.log.info(' needs update')
needsupdate = True
if needsupdate:
sys.exit(-1)
class CommandLineEnvironment(object):
"""Implements the core functionality for a command line frontend to
``webassets``, abstracted in a way to allow frameworks to integrate the
functionality into their own tools, for example, as a Django management
command, or a command for ``Flask-Script``.
"""
def __init__(self, env, log, post_build=None, commands=None):
self.environment = env
self.log = log
self.event_handlers = dict(post_build=lambda: True)
if callable(post_build):
self.event_handlers['post_build'] = post_build
# Instantiate each command
command_def = self.DefaultCommands.copy()
command_def.update(commands or {})
self.commands = {}
for name, construct in command_def.items():
if not construct:
continue
if not isinstance(construct, (list, tuple)):
construct = [construct, (), {}]
self.commands[name] = construct[0](
self, *construct[1], **construct[2])
def __getattr__(self, item):
# Allow method-like access to commands.
if item in self.commands:
return self.commands[item]
raise AttributeError(item)
def invoke(self, command, args):
"""Invoke ``command``, or throw a CommandError.
This is essentially a simple validation mechanism. Feel free
to call the individual command methods manually.
"""
try:
function = self.commands[command]
except KeyError as e:
raise CommandError('unknown command: %s' % e)
else:
return function(**args)
# List of commands installed
DefaultCommands = {
'build': BuildCommand,
'watch': WatchCommand,
'clean': CleanCommand,
'check': CheckCommand
}
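# --- Illustrative sketch (not part of webassets itself) ---------------------
# A minimal example of driving the command core programmatically, e.g. from a
# framework integration.  Both ``env`` (a configured webassets Environment)
# and ``log`` (a standard library logger) are assumed to be supplied by the
# caller; the function name is hypothetical.
def _example_invoke_build(env, log):
    """Sketch: build every registered bundle through the CLI core."""
    cmd = CommandLineEnvironment(env, log)
    # Equivalent to running ``build`` with no extra arguments on the CLI.
    return cmd.invoke('build', {})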
class GenericArgparseImplementation(object):
"""Generic command line utility to interact with an webassets environment.
This is effectively a reference implementation of a command line utility
based on the ``CommandLineEnvironment`` class. Implementers may find it
    feasible to simply base their own command line utility on this, rather than
implementing something custom on top of ``CommandLineEnvironment``. In
fact, if that is possible, you are encouraged to do so for greater
consistency across implementations.
"""
class WatchCommand(WatchCommand):
"""Extended watch command that also looks at the config file itself."""
def __init__(self, cmd_env, argparse_ns):
WatchCommand.__init__(self, cmd_env)
self.ns = argparse_ns
def yield_files_to_watch(self):
for result in WatchCommand.yield_files_to_watch(self):
yield result
# If the config changes, rebuild all bundles
if getattr(self.ns, 'config', None):
yield self.ns.config, self.reload_config
def reload_config(self):
try:
self.cmd.environment = YAMLLoader(self.ns.config).load_environment()
except Exception as e:
raise EnvironmentError(e)
return True
def __init__(self, env=None, log=None, prog=None, no_global_options=False):
try:
import argparse
except ImportError:
raise RuntimeError(
'The webassets command line now requires the '
'"argparse" library on Python versions <= 2.6.')
else:
self.argparse = argparse
self.env = env
self.log = log
self._construct_parser(prog, no_global_options)
def _construct_parser(self, prog=None, no_global_options=False):
self.parser = parser = self.argparse.ArgumentParser(
description="Manage assets.",
prog=prog)
if not no_global_options:
# Start with the base arguments that are valid for any command.
# XXX: Add those to the subparser?
parser.add_argument("-v", dest="verbose", action="store_true",
help="be verbose")
parser.add_argument("-q", action="store_true", dest="quiet",
help="be quiet")
if self.env is None:
loadenv = parser.add_mutually_exclusive_group()
loadenv.add_argument("-c", "--config", dest="config",
help="read environment from a YAML file")
loadenv.add_argument("-m", "--module", dest="module",
help="read environment from a Python module")
# Add subparsers.
subparsers = parser.add_subparsers(dest='command')
for command in CommandLineEnvironment.DefaultCommands.keys():
command_parser = subparsers.add_parser(command)
maker = getattr(self, 'make_%s_parser' % command, False)
if maker:
maker(command_parser)
@staticmethod
def make_build_parser(parser):
parser.add_argument(
'bundles', nargs='*', metavar='BUNDLE',
help='Optional bundle names to process. If none are '
'specified, then all known bundles will be built.')
parser.add_argument(
'--output', '-o', nargs=2, action='append',
metavar=('BUNDLE', 'FILE'),
help='Build the given bundle, and use a custom output '
'file. Can be given multiple times.')
parser.add_argument(
'--directory', '-d',
help='Write built files to this directory, using the '
'basename defined by the bundle. Will offset '
'the original bundle output paths on their common '
'prefix. Cannot be used with --output.')
parser.add_argument(
'--no-cache', action='store_true',
help='Do not use a cache that might be configured.')
parser.add_argument(
'--manifest',
help='Write a manifest to the given file. Also supports '
'the id:arg format, if you want to use a different '
'manifest implementation.')
parser.add_argument(
'--production', action='store_true',
            help='Forcibly turn off debug mode for the build. This '
'only has an effect if debug is set to "merge".')
def _setup_logging(self, ns):
if self.log:
log = self.log
else:
log = logging.getLogger('webassets.script')
if not log.handlers:
# In theory, this could run multiple times (e.g. tests)
handler = logging.StreamHandler()
log.addHandler(handler)
# Note that setting the level filter at the handler level is
# better than the logger level, since this is "our" handler,
# we create it, for the purposes of having a default output.
# The logger itself the user may be modifying.
handler.setLevel(logging.DEBUG if ns.verbose else (
logging.WARNING if ns.quiet else logging.INFO))
return log
def _setup_assets_env(self, ns, log):
env = self.env
if env is None:
assert not (ns.module and ns.config)
if ns.module:
env = PythonLoader(ns.module).load_environment()
if ns.config:
env = YAMLLoader(ns.config).load_environment()
return env
def _setup_cmd_env(self, assets_env, log, ns):
return CommandLineEnvironment(assets_env, log, commands={
'watch': (GenericArgparseImplementation.WatchCommand, (ns,), {})
})
def _prepare_command_args(self, ns):
# Prepare a dict of arguments cleaned of values that are not
# command-specific, and which the command method would not accept.
args = vars(ns).copy()
for action in self.parser._actions:
dest = action.dest
if dest in args:
del args[dest]
return args
def run_with_ns(self, ns):
log = self._setup_logging(ns)
env = self._setup_assets_env(ns, log)
if env is None:
raise CommandError(
"Error: No environment given or found. Maybe use -m?")
cmd = self._setup_cmd_env(env, log, ns)
# Run the selected command
args = self._prepare_command_args(ns)
return cmd.invoke(ns.command, args)
def run_with_argv(self, argv):
try:
ns = self.parser.parse_args(argv)
except SystemExit as e:
# We do not want the main() function to exit the program.
# See run() instead.
return e.args[0]
return self.run_with_ns(ns)
def main(self, argv):
"""Parse the given command line.
        The command line is expected to NOT include what would be sys.argv[0].
"""
try:
return self.run_with_argv(argv)
except CommandError as e:
print(e)
return 1
def main(argv, env=None):
"""Execute the generic version of the command line interface.
You only need to work directly with ``GenericArgparseImplementation`` if
you desire to customize things.
If no environment is given, additional arguments will be supported to allow
the user to specify/construct the environment on the command line.
"""
return GenericArgparseImplementation(env).main(argv)
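# --- Illustrative sketch (not part of webassets itself) ---------------------
# ``main()`` can also be driven directly from Python instead of a console
# script.  The YAML path below is a hypothetical example; any file accepted
# by YAMLLoader would work.
def _example_main_from_yaml():
    """Sketch: build all bundles described by a YAML config file."""
    # Same as running: <prog> -c assets.yml build
    return main(['-c', 'assets.yml', 'build'])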
def run():
"""Runs the command line interface via ``main``, then exits the process
with a proper return code."""
sys.exit(main(sys.argv[1:]) or 0)
if __name__ == '__main__':
run()
|
gi0baro/weppy-assets
|
weppy_assets/webassets/script.py
|
Python
|
bsd-3-clause
| 22,478 |
/* Copyright (c) 2016, Alexander Entinger / LXRobotics
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of motor-controller-highpower-motorshield nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "ioentity.h"
#include <assert.h>
#include <algorithm>
namespace arduinoio {
/**
* @brief Constructor
 * @param serial shared pointer to the serial interface used by this io entity
*/
ioentity::ioentity(boost::shared_ptr<serial> const &serial) : m_serial(serial), m_isConfigured(false) {
}
/**
* @brief Destructor
*/
ioentity::~ioentity() {
m_pinVect.clear();
}
} // end of namespace arduinoio
|
lxrobotics/arduinoio
|
framework/ioentity.cpp
|
C++
|
bsd-3-clause
| 1,958 |
{-# LANGUAGE FlexibleInstances #-}
-- ghc options
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- {-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- {-# OPTIONS_GHC -fno-warn-missing-signatures #-}
-- {-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-uni-patterns #-}
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <[email protected]>
-- Stability : experimental
-- Portability: non-portable
--
-- This module provides pretty printing functionality for Pire's
-- abstract and concrete syntax: names
module Pire.Pretty.Nm where
import Pire.Syntax.Nm
-- import Data.Text as T
import Text.PrettyPrint as TPP
import Pire.Pretty.Common
import Pire.Pretty.Ws()
-- instance Disp s => Disp (Nm_ s) where
-- disp (Nm_ nm (Ws ws)) = do
-- dnm <- disp nm
-- return $ dnm <> (text $ T.unpack ws)
instance Disp s => Disp (Nm1 s) where
disp (Nm1 nm) = disp nm
disp (Nm1_ nm ws) = do
dnm <- disp nm
dws <- disp ws
return $ dnm <> dws
instance Disp s => Disp (Nm s s) where
disp (Nm nm) = disp nm
disp (Nm_ nm ws) = do
dnm <- disp nm
dws <- disp ws
return $ dnm <> dws
|
reuleaux/pire
|
src/Pire/Pretty/Nm.hs
|
Haskell
|
bsd-3-clause
| 1,331 |
/*
xxHash - Extremely Fast Hash algorithm
Header File
Copyright (C) 2012-2016, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- xxHash source repository : https://github.com/Cyan4973/xxHash
*/
/* Notice extracted from xxHash homepage :
xxHash is an extremely fast Hash algorithm, running at RAM speed limits.
It also successfully passes all tests from the SMHasher suite.
Comparison (single thread, Windows Seven 32 bits, using SMHasher on a Core 2 Duo
@3GHz)
Name Speed Q.Score Author
xxHash 5.4 GB/s 10
CrapWow 3.2 GB/s 2 Andrew
MurmurHash 3a        2.7 GB/s     10       Austin Appleby
SpookyHash 2.0 GB/s 10 Bob Jenkins
SBox 1.4 GB/s 9 Bret Mulvey
Lookup3 1.2 GB/s 9 Bob Jenkins
SuperFastHash 1.2 GB/s 1 Paul Hsieh
CityHash64 1.05 GB/s 10 Pike & Alakuijala
FNV 0.55 GB/s 5 Fowler, Noll, Vo
CRC32 0.43 GB/s 9
MD5-32 0.33 GB/s 10 Ronald L. Rivest
SHA1-32 0.28 GB/s 10
Q.Score is a measure of quality of the hash function.
It depends on successfully passing SMHasher test set.
10 is a perfect score.
A 64-bit version, named XXH64, is available since r35.
It offers much better speed, but for 64-bit applications only.
Name Speed on 64 bits Speed on 32 bits
XXH64 13.8 GB/s 1.9 GB/s
XXH32 6.8 GB/s 6.0 GB/s
*/
#ifndef XXHASH_H_5627135585666179
#define XXHASH_H_5627135585666179 1
#if defined(__cplusplus)
extern "C" {
#endif
/* ****************************
* Definitions
******************************/
#include <stddef.h> /* size_t */
typedef enum { XXH_OK = 0, XXH_ERROR } XXH_errorcode;
/* ****************************
* API modifier
******************************/
/** XXH_INLINE_ALL (and XXH_PRIVATE_API)
* This is useful to include xxhash functions in `static` mode
* in order to inline them, and remove their symbol from the public list.
* Inlining can offer dramatic performance improvement on small keys.
* Methodology :
* #define XXH_INLINE_ALL
* #include "xxhash.h"
* `xxhash.c` is automatically included.
* It's not useful to compile and link it as a separate module.
*/
#if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
#ifndef XXH_STATIC_LINKING_ONLY
#define XXH_STATIC_LINKING_ONLY
#endif
#if defined(__GNUC__)
#define XXH_PUBLIC_API static __inline __attribute__((unused))
#elif defined(__cplusplus) || \
(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
#define XXH_PUBLIC_API static inline
#elif defined(_MSC_VER)
#define XXH_PUBLIC_API static __inline
#else
/* this version may generate warnings for unused static functions */
#define XXH_PUBLIC_API static
#endif
#else
#define XXH_PUBLIC_API /* do nothing */
#endif /* XXH_INLINE_ALL || XXH_PRIVATE_API */
/*! XXH_NAMESPACE, aka Namespace Emulation :
*
* If you want to include _and expose_ xxHash functions from within your own
* library,
* but also want to avoid symbol collisions with other libraries which may also
* include xxHash,
*
* you can use XXH_NAMESPACE, to automatically prefix any public symbol from
* xxhash library
* with the value of XXH_NAMESPACE (therefore, avoid NULL and numeric values).
*
* Note that no change is required within the calling program as long as it
* includes `xxhash.h` :
* regular symbol name will be automatically translated by this header.
*/
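/* Illustrative sketch (added for clarity, not part of the original header):
 * compiling a library `mylib` with `-DXXH_NAMESPACE=mylib_` renames the
 * exported symbol of XXH32() to `mylib_XXH32`, while code that includes this
 * header with the same define keeps calling plain `XXH32(...)` unchanged.
 */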
#ifdef XXH_NAMESPACE
#define XXH_CAT(A, B) A##B
#define XXH_NAME2(A, B) XXH_CAT(A, B)
#define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber)
#define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32)
#define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState)
#define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState)
#define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset)
#define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update)
#define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest)
#define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState)
#define XXH32_canonicalFromHash \
XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash)
#define XXH32_hashFromCanonical \
XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical)
#define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64)
#define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState)
#define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState)
#define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset)
#define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update)
#define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest)
#define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState)
#define XXH64_canonicalFromHash \
XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash)
#define XXH64_hashFromCanonical \
XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical)
#endif
/* *************************************
* Version
***************************************/
#define XXH_VERSION_MAJOR 0
#define XXH_VERSION_MINOR 6
#define XXH_VERSION_RELEASE 5
#define XXH_VERSION_NUMBER \
(XXH_VERSION_MAJOR * 100 * 100 + XXH_VERSION_MINOR * 100 + \
XXH_VERSION_RELEASE)
XXH_PUBLIC_API unsigned XXH_versionNumber(void);
/*-**********************************************************************
* 32-bit hash
************************************************************************/
typedef unsigned int XXH32_hash_t;
/*! XXH32() :
Calculate the 32-bit hash of sequence "length" bytes stored at memory
address "input".
The memory between input & input+length must be valid (allocated and
read-accessible).
"seed" can be used to alter the result predictably.
Speed on Core 2 Duo @ 3 GHz (single thread, SMHasher benchmark) : 5.4 GB/s
*/
XXH_PUBLIC_API XXH32_hash_t XXH32(const void *input, size_t length,
unsigned int seed);
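/* Illustrative one-shot usage (added for clarity, not part of the original
 * header); the buffer and seed are arbitrary example values:
 *
 *   const char data[] = "hello";
 *   XXH32_hash_t const h = XXH32(data, sizeof(data) - 1, 0);
 *
 * The same input and seed always produce the same 32-bit value, on any
 * platform or endianness.
 */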
/*====== Streaming ======*/
typedef struct XXH32_state_s XXH32_state_t; /* incomplete type */
XXH_PUBLIC_API XXH32_state_t *XXH32_createState(void);
XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t *statePtr);
XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t *dst_state,
const XXH32_state_t *src_state);
XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t *statePtr,
unsigned int seed);
XXH_PUBLIC_API XXH_errorcode XXH32_update(XXH32_state_t *statePtr,
const void *input, size_t length);
XXH_PUBLIC_API XXH32_hash_t XXH32_digest(const XXH32_state_t *statePtr);
/*
* Streaming functions generate the xxHash of an input provided in multiple
* segments.
* Note that, for small input, they are slower than single-call functions, due
* to state management.
* For small inputs, prefer `XXH32()` and `XXH64()`, which are better optimized.
*
* XXH state must first be allocated, using XXH*_createState() .
*
* Start a new hash by initializing state with a seed, using XXH*_reset().
*
* Then, feed the hash state by calling XXH*_update() as many times as
* necessary.
* The function returns an error code, with 0 meaning OK, and any other value
* meaning there is an error.
*
* Finally, a hash value can be produced anytime, by using XXH*_digest().
* This function returns the nn-bits hash as an int or long long.
*
* It's still possible to continue inserting input into the hash state after a
* digest,
* and generate some new hashes later on, by calling again XXH*_digest().
*
* When done, free XXH state space if it was allocated dynamically.
*/
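/* Illustrative sketch of the streaming pattern described above (added for
 * clarity, not part of the original header); error handling is omitted and
 * the two input chunks are arbitrary example data:
 *
 *   XXH32_state_t* const state = XXH32_createState();
 *   XXH32_reset(state, 0);                  // seed = 0
 *   XXH32_update(state, "hello ", 6);
 *   XXH32_update(state, "world", 5);
 *   XXH32_hash_t const h = XXH32_digest(state);
 *   XXH32_freeState(state);
 *
 * `h` equals the value of XXH32("hello world", 11, 0) computed in one call.
 */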
/*====== Canonical representation ======*/
typedef struct {
unsigned char digest[4];
} XXH32_canonical_t;
XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t *dst,
XXH32_hash_t hash);
XXH_PUBLIC_API XXH32_hash_t
XXH32_hashFromCanonical(const XXH32_canonical_t *src);
/* Default result type for XXH functions are primitive unsigned 32 and 64 bits.
* The canonical representation uses human-readable write convention, aka
* big-endian (large digits first).
* These functions allow transformation of hash result into and from its
* canonical format.
* This way, hash values can be written into a file / memory, and remain
* comparable on different systems and programs.
*/
#ifndef XXH_NO_LONG_LONG
/*-**********************************************************************
* 64-bit hash
************************************************************************/
typedef unsigned long long XXH64_hash_t;
/*! XXH64() :
Calculate the 64-bit hash of sequence of length "len" stored at memory
address "input".
"seed" can be used to alter the result predictably.
This function runs faster on 64-bit systems, but slower on 32-bit systems
(see benchmark).
*/
XXH_PUBLIC_API XXH64_hash_t XXH64(const void *input, size_t length,
unsigned long long seed);
/*====== Streaming ======*/
typedef struct XXH64_state_s XXH64_state_t; /* incomplete type */
XXH_PUBLIC_API XXH64_state_t *XXH64_createState(void);
XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t *statePtr);
XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t *dst_state,
const XXH64_state_t *src_state);
XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH64_state_t *statePtr,
unsigned long long seed);
XXH_PUBLIC_API XXH_errorcode XXH64_update(XXH64_state_t *statePtr,
const void *input, size_t length);
XXH_PUBLIC_API XXH64_hash_t XXH64_digest(const XXH64_state_t *statePtr);
/*====== Canonical representation ======*/
typedef struct {
unsigned char digest[8];
} XXH64_canonical_t;
XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t *dst,
XXH64_hash_t hash);
XXH_PUBLIC_API XXH64_hash_t
XXH64_hashFromCanonical(const XXH64_canonical_t *src);
#endif /* XXH_NO_LONG_LONG */
#ifdef XXH_STATIC_LINKING_ONLY
/* ================================================================================================
This section contains declarations which are not guaranteed to remain stable.
They may change in future versions, becoming incompatible with a different
version of the library.
These declarations should only be used with static linking.
Never use them in association with dynamic linking !
===================================================================================================
*/
/* These definitions are only present to allow
* static allocation of XXH state, on stack or in a struct for example.
* Never **ever** use members directly. */
#if !defined(__VMS) && \
(defined(__cplusplus) || \
(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */))
#include <stdint.h>
struct XXH32_state_s {
uint32_t total_len_32;
uint32_t large_len;
uint32_t v1;
uint32_t v2;
uint32_t v3;
uint32_t v4;
uint32_t mem32[4];
uint32_t memsize;
uint32_t
reserved; /* never read nor write, might be removed in a future version */
}; /* typedef'd to XXH32_state_t */
struct XXH64_state_s {
uint64_t total_len;
uint64_t v1;
uint64_t v2;
uint64_t v3;
uint64_t v4;
uint64_t mem64[4];
uint32_t memsize;
uint32_t reserved[2]; /* never read nor write, might be removed in a future
version */
}; /* typedef'd to XXH64_state_t */
#else
struct XXH32_state_s {
unsigned total_len_32;
unsigned large_len;
unsigned v1;
unsigned v2;
unsigned v3;
unsigned v4;
unsigned mem32[4];
unsigned memsize;
unsigned
reserved; /* never read nor write, might be removed in a future version */
}; /* typedef'd to XXH32_state_t */
#ifndef XXH_NO_LONG_LONG /* remove 64-bit support */
struct XXH64_state_s {
unsigned long long total_len;
unsigned long long v1;
unsigned long long v2;
unsigned long long v3;
unsigned long long v4;
unsigned long long mem64[4];
unsigned memsize;
unsigned reserved[2]; /* never read nor write, might be removed in a future
version */
}; /* typedef'd to XXH64_state_t */
#endif
#endif
#if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
#include "xxhash.c" /* include xxhash function bodies as `static`, for inlining */
#endif
#endif /* XXH_STATIC_LINKING_ONLY */
#if defined(__cplusplus)
}
#endif
#endif /* XXHASH_H_5627135585666179 */
|
anton-povarov/pinba2
|
third_party/t1ha/tests/xxhash/xxhash.h
|
C
|
bsd-3-clause
| 14,323 |
/*
* Copyright (c) 2016, The OpenThread Authors.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @file alarm.c
* Platform abstraction for the alarm
*/
#include <openthread/platform/alarm-milli.h>
#include "platform-da15000.h"
#include "hw_timer0.h"
static bool sIsRunning = false;
static uint32_t sAlarm = 0;
static uint32_t sCounter;
volatile bool sAlarmFired = false;
static void timer0_interrupt_cb(void)
{
sCounter++;
}
void da15000AlarmProcess(otInstance *aInstance)
{
if ((sIsRunning) && (sAlarm <= sCounter))
{
sIsRunning = false;
otPlatAlarmMilliFired(aInstance);
}
}
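// Note added for clarity (not in the original source): sCounter is advanced by
// the timer0 interrupt and serves as the tick source for otPlatAlarmMilliGetNow().
// otPlatAlarmMilliStartAt() arms an alarm by storing t0 + dt in sAlarm, and
// da15000AlarmProcess(), called from the platform main loop, fires the
// OpenThread callback once the counter reaches that value.  The reload values
// in da15000AlarmInit() are assumed to give an approximately 1 ms tick period.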
void da15000AlarmInit(void)
{
hw_timer0_init(NULL);
hw_timer0_set_clock_source(HW_TIMER0_CLK_SRC_FAST);
hw_timer0_set_pwm_mode(HW_TIMER0_MODE_PWM);
hw_timer0_set_fast_clock_div(HW_TIMER0_FAST_CLK_DIV_4);
hw_timer0_set_t0_reload(0x07D0, 0x07D0);
hw_timer0_register_int(timer0_interrupt_cb);
hw_timer0_set_on_clock_div(false);
}
uint32_t otPlatAlarmMilliGetNow(void)
{
return sCounter;
}
void otPlatAlarmMilliStartAt(otInstance *aInstance, uint32_t t0, uint32_t dt)
{
OT_UNUSED_VARIABLE(aInstance);
sAlarm = t0 + dt;
sIsRunning = true;
if (sCounter == 0)
{
hw_timer0_enable();
}
hw_timer0_unfreeze();
}
void otPlatAlarmMilliStop(otInstance *aInstance)
{
OT_UNUSED_VARIABLE(aInstance);
sIsRunning = false;
hw_timer0_freeze();
}
|
erja-gp/openthread
|
examples/platforms/da15000/alarm.c
|
C
|
bsd-3-clause
| 2,963 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from google.appengine.api.api_base_pb import *
import google.appengine.api.api_base_pb
from google.appengine.datastore.action_pb import *
import google.appengine.datastore.action_pb
from google.appengine.datastore.entity_pb import *
import google.appengine.datastore.entity_pb
from google.appengine.datastore.snapshot_pb import *
import google.appengine.datastore.snapshot_pb
class InternalHeader(ProtocolBuffer.ProtocolMessage):
has_requesting_app_id_ = 0
requesting_app_id_ = ""
has_requesting_project_id_ = 0
requesting_project_id_ = ""
has_requesting_version_id_ = 0
requesting_version_id_ = ""
has_api_settings_ = 0
api_settings_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def requesting_app_id(self): return self.requesting_app_id_
def set_requesting_app_id(self, x):
self.has_requesting_app_id_ = 1
self.requesting_app_id_ = x
def clear_requesting_app_id(self):
if self.has_requesting_app_id_:
self.has_requesting_app_id_ = 0
self.requesting_app_id_ = ""
def has_requesting_app_id(self): return self.has_requesting_app_id_
def requesting_project_id(self): return self.requesting_project_id_
def set_requesting_project_id(self, x):
self.has_requesting_project_id_ = 1
self.requesting_project_id_ = x
def clear_requesting_project_id(self):
if self.has_requesting_project_id_:
self.has_requesting_project_id_ = 0
self.requesting_project_id_ = ""
def has_requesting_project_id(self): return self.has_requesting_project_id_
def requesting_version_id(self): return self.requesting_version_id_
def set_requesting_version_id(self, x):
self.has_requesting_version_id_ = 1
self.requesting_version_id_ = x
def clear_requesting_version_id(self):
if self.has_requesting_version_id_:
self.has_requesting_version_id_ = 0
self.requesting_version_id_ = ""
def has_requesting_version_id(self): return self.has_requesting_version_id_
def api_settings(self): return self.api_settings_
def set_api_settings(self, x):
self.has_api_settings_ = 1
self.api_settings_ = x
def clear_api_settings(self):
if self.has_api_settings_:
self.has_api_settings_ = 0
self.api_settings_ = ""
def has_api_settings(self): return self.has_api_settings_
def MergeFrom(self, x):
assert x is not self
if (x.has_requesting_app_id()): self.set_requesting_app_id(x.requesting_app_id())
if (x.has_requesting_project_id()): self.set_requesting_project_id(x.requesting_project_id())
if (x.has_requesting_version_id()): self.set_requesting_version_id(x.requesting_version_id())
if (x.has_api_settings()): self.set_api_settings(x.api_settings())
def Equals(self, x):
if x is self: return 1
if self.has_requesting_app_id_ != x.has_requesting_app_id_: return 0
if self.has_requesting_app_id_ and self.requesting_app_id_ != x.requesting_app_id_: return 0
if self.has_requesting_project_id_ != x.has_requesting_project_id_: return 0
if self.has_requesting_project_id_ and self.requesting_project_id_ != x.requesting_project_id_: return 0
if self.has_requesting_version_id_ != x.has_requesting_version_id_: return 0
if self.has_requesting_version_id_ and self.requesting_version_id_ != x.requesting_version_id_: return 0
if self.has_api_settings_ != x.has_api_settings_: return 0
if self.has_api_settings_ and self.api_settings_ != x.api_settings_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_requesting_app_id_): n += 1 + self.lengthString(len(self.requesting_app_id_))
if (self.has_requesting_project_id_): n += 1 + self.lengthString(len(self.requesting_project_id_))
if (self.has_requesting_version_id_): n += 1 + self.lengthString(len(self.requesting_version_id_))
if (self.has_api_settings_): n += 1 + self.lengthString(len(self.api_settings_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_requesting_app_id_): n += 1 + self.lengthString(len(self.requesting_app_id_))
if (self.has_requesting_project_id_): n += 1 + self.lengthString(len(self.requesting_project_id_))
if (self.has_requesting_version_id_): n += 1 + self.lengthString(len(self.requesting_version_id_))
if (self.has_api_settings_): n += 1 + self.lengthString(len(self.api_settings_))
return n
def Clear(self):
self.clear_requesting_app_id()
self.clear_requesting_project_id()
self.clear_requesting_version_id()
self.clear_api_settings()
def OutputUnchecked(self, out):
if (self.has_requesting_app_id_):
out.putVarInt32(18)
out.putPrefixedString(self.requesting_app_id_)
if (self.has_api_settings_):
out.putVarInt32(26)
out.putPrefixedString(self.api_settings_)
if (self.has_requesting_project_id_):
out.putVarInt32(34)
out.putPrefixedString(self.requesting_project_id_)
if (self.has_requesting_version_id_):
out.putVarInt32(42)
out.putPrefixedString(self.requesting_version_id_)
def OutputPartial(self, out):
if (self.has_requesting_app_id_):
out.putVarInt32(18)
out.putPrefixedString(self.requesting_app_id_)
if (self.has_api_settings_):
out.putVarInt32(26)
out.putPrefixedString(self.api_settings_)
if (self.has_requesting_project_id_):
out.putVarInt32(34)
out.putPrefixedString(self.requesting_project_id_)
if (self.has_requesting_version_id_):
out.putVarInt32(42)
out.putPrefixedString(self.requesting_version_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
self.set_requesting_app_id(d.getPrefixedString())
continue
if tt == 26:
self.set_api_settings(d.getPrefixedString())
continue
if tt == 34:
self.set_requesting_project_id(d.getPrefixedString())
continue
if tt == 42:
self.set_requesting_version_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_requesting_app_id_: res+=prefix+("requesting_app_id: %s\n" % self.DebugFormatString(self.requesting_app_id_))
if self.has_requesting_project_id_: res+=prefix+("requesting_project_id: %s\n" % self.DebugFormatString(self.requesting_project_id_))
if self.has_requesting_version_id_: res+=prefix+("requesting_version_id: %s\n" % self.DebugFormatString(self.requesting_version_id_))
if self.has_api_settings_: res+=prefix+("api_settings: %s\n" % self.DebugFormatString(self.api_settings_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
krequesting_app_id = 2
krequesting_project_id = 4
krequesting_version_id = 5
kapi_settings = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "requesting_app_id",
3: "api_settings",
4: "requesting_project_id",
5: "requesting_version_id",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.InternalHeader'
class Transaction(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_handle_ = 0
handle_ = 0
has_app_ = 0
app_ = ""
has_mark_changes_ = 0
mark_changes_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def handle(self): return self.handle_
def set_handle(self, x):
self.has_handle_ = 1
self.handle_ = x
def clear_handle(self):
if self.has_handle_:
self.has_handle_ = 0
self.handle_ = 0
def has_handle(self): return self.has_handle_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def mark_changes(self): return self.mark_changes_
def set_mark_changes(self, x):
self.has_mark_changes_ = 1
self.mark_changes_ = x
def clear_mark_changes(self):
if self.has_mark_changes_:
self.has_mark_changes_ = 0
self.mark_changes_ = 0
def has_mark_changes(self): return self.has_mark_changes_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_handle()): self.set_handle(x.handle())
if (x.has_app()): self.set_app(x.app())
if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_handle_ != x.has_handle_: return 0
if self.has_handle_ and self.handle_ != x.handle_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
if self.has_mark_changes_ != x.has_mark_changes_: return 0
if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_handle_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: handle not set.')
if (not self.has_app_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(len(self.app_))
if (self.has_mark_changes_): n += 2
return n + 10
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_handle_):
n += 9
if (self.has_app_):
n += 1
n += self.lengthString(len(self.app_))
if (self.has_mark_changes_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_handle()
self.clear_app()
self.clear_mark_changes()
def OutputUnchecked(self, out):
out.putVarInt32(9)
out.put64(self.handle_)
out.putVarInt32(18)
out.putPrefixedString(self.app_)
if (self.has_mark_changes_):
out.putVarInt32(24)
out.putBoolean(self.mark_changes_)
if (self.has_header_):
out.putVarInt32(34)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_handle_):
out.putVarInt32(9)
out.put64(self.handle_)
if (self.has_app_):
out.putVarInt32(18)
out.putPrefixedString(self.app_)
if (self.has_mark_changes_):
out.putVarInt32(24)
out.putBoolean(self.mark_changes_)
if (self.has_header_):
out.putVarInt32(34)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 9:
self.set_handle(d.get64())
continue
if tt == 18:
self.set_app(d.getPrefixedString())
continue
if tt == 24:
self.set_mark_changes(d.getBoolean())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_))
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 4
khandle = 1
kapp = 2
kmark_changes = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "handle",
2: "app",
3: "mark_changes",
4: "header",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.DOUBLE,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Transaction'
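# --- Illustrative sketch (not part of the generated module) -----------------
# Minimal use of the generated Transaction message, relying only on accessors
# defined above; the handle and app id are arbitrary example values.
def _example_transaction():
  txn = Transaction()
  txn.set_handle(42)            # required fixed64 field
  txn.set_app('example-app')    # required string field
  assert txn.IsInitialized()    # both required fields are now set
  return txn.ByteSize()         # size in bytes of the encoded message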
class Query_Filter(ProtocolBuffer.ProtocolMessage):
LESS_THAN = 1
LESS_THAN_OR_EQUAL = 2
GREATER_THAN = 3
GREATER_THAN_OR_EQUAL = 4
EQUAL = 5
IN = 6
EXISTS = 7
_Operator_NAMES = {
1: "LESS_THAN",
2: "LESS_THAN_OR_EQUAL",
3: "GREATER_THAN",
4: "GREATER_THAN_OR_EQUAL",
5: "EQUAL",
6: "IN",
7: "EXISTS",
}
def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "")
Operator_Name = classmethod(Operator_Name)
has_op_ = 0
op_ = 0
def __init__(self, contents=None):
self.property_ = []
if contents is not None: self.MergeFromString(contents)
def op(self): return self.op_
def set_op(self, x):
self.has_op_ = 1
self.op_ = x
def clear_op(self):
if self.has_op_:
self.has_op_ = 0
self.op_ = 0
def has_op(self): return self.has_op_
def property_size(self): return len(self.property_)
def property_list(self): return self.property_
def property(self, i):
return self.property_[i]
def mutable_property(self, i):
return self.property_[i]
def add_property(self):
x = Property()
self.property_.append(x)
return x
def clear_property(self):
self.property_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_op()): self.set_op(x.op())
for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
def Equals(self, x):
if x is self: return 1
if self.has_op_ != x.has_op_: return 0
if self.has_op_ and self.op_ != x.op_: return 0
if len(self.property_) != len(x.property_): return 0
for e1, e2 in zip(self.property_, x.property_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_op_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: op not set.')
for p in self.property_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.op_)
n += 1 * len(self.property_)
for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_op_):
n += 1
n += self.lengthVarInt64(self.op_)
n += 1 * len(self.property_)
for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_op()
self.clear_property()
def OutputUnchecked(self, out):
out.putVarInt32(48)
out.putVarInt32(self.op_)
for i in xrange(len(self.property_)):
out.putVarInt32(114)
out.putVarInt32(self.property_[i].ByteSize())
self.property_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_op_):
out.putVarInt32(48)
out.putVarInt32(self.op_)
for i in xrange(len(self.property_)):
out.putVarInt32(114)
out.putVarInt32(self.property_[i].ByteSizePartial())
self.property_[i].OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 36: break
if tt == 48:
self.set_op(d.getVarInt32())
continue
if tt == 114:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_property().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
cnt=0
for e in self.property_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("property%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
class Query_Order(ProtocolBuffer.ProtocolMessage):
ASCENDING = 1
DESCENDING = 2
_Direction_NAMES = {
1: "ASCENDING",
2: "DESCENDING",
}
def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
Direction_Name = classmethod(Direction_Name)
has_property_ = 0
property_ = ""
has_direction_ = 0
direction_ = 1
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def property(self): return self.property_
def set_property(self, x):
self.has_property_ = 1
self.property_ = x
def clear_property(self):
if self.has_property_:
self.has_property_ = 0
self.property_ = ""
def has_property(self): return self.has_property_
def direction(self): return self.direction_
def set_direction(self, x):
self.has_direction_ = 1
self.direction_ = x
def clear_direction(self):
if self.has_direction_:
self.has_direction_ = 0
self.direction_ = 1
def has_direction(self): return self.has_direction_
def MergeFrom(self, x):
assert x is not self
if (x.has_property()): self.set_property(x.property())
if (x.has_direction()): self.set_direction(x.direction())
def Equals(self, x):
if x is self: return 1
if self.has_property_ != x.has_property_: return 0
if self.has_property_ and self.property_ != x.property_: return 0
if self.has_direction_ != x.has_direction_: return 0
if self.has_direction_ and self.direction_ != x.direction_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_property_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: property not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.property_))
if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_property_):
n += 1
n += self.lengthString(len(self.property_))
if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
return n
def Clear(self):
self.clear_property()
self.clear_direction()
def OutputUnchecked(self, out):
out.putVarInt32(82)
out.putPrefixedString(self.property_)
if (self.has_direction_):
out.putVarInt32(88)
out.putVarInt32(self.direction_)
def OutputPartial(self, out):
if (self.has_property_):
out.putVarInt32(82)
out.putPrefixedString(self.property_)
if (self.has_direction_):
out.putVarInt32(88)
out.putVarInt32(self.direction_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 76: break
if tt == 82:
self.set_property(d.getPrefixedString())
continue
if tt == 88:
self.set_direction(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
return res
class Query(ProtocolBuffer.ProtocolMessage):
ORDER_FIRST = 1
ANCESTOR_FIRST = 2
FILTER_FIRST = 3
_Hint_NAMES = {
1: "ORDER_FIRST",
2: "ANCESTOR_FIRST",
3: "FILTER_FIRST",
}
def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
Hint_Name = classmethod(Hint_Name)
has_header_ = 0
header_ = None
has_app_ = 0
app_ = ""
has_name_space_ = 0
name_space_ = ""
has_kind_ = 0
kind_ = ""
has_ancestor_ = 0
ancestor_ = None
has_search_query_ = 0
search_query_ = ""
has_hint_ = 0
hint_ = 0
has_count_ = 0
count_ = 0
has_offset_ = 0
offset_ = 0
has_limit_ = 0
limit_ = 0
has_compiled_cursor_ = 0
compiled_cursor_ = None
has_end_compiled_cursor_ = 0
end_compiled_cursor_ = None
has_require_perfect_plan_ = 0
require_perfect_plan_ = 0
has_keys_only_ = 0
keys_only_ = 0
has_transaction_ = 0
transaction_ = None
has_compile_ = 0
compile_ = 0
has_failover_ms_ = 0
failover_ms_ = 0
has_strong_ = 0
strong_ = 0
has_distinct_ = 0
distinct_ = 0
has_min_safe_time_seconds_ = 0
min_safe_time_seconds_ = 0
has_persist_offset_ = 0
persist_offset_ = 1
def __init__(self, contents=None):
self.filter_ = []
self.order_ = []
self.composite_index_ = []
self.property_name_ = []
self.group_by_property_name_ = []
self.safe_replica_name_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def name_space(self): return self.name_space_
def set_name_space(self, x):
self.has_name_space_ = 1
self.name_space_ = x
def clear_name_space(self):
if self.has_name_space_:
self.has_name_space_ = 0
self.name_space_ = ""
def has_name_space(self): return self.has_name_space_
def kind(self): return self.kind_
def set_kind(self, x):
self.has_kind_ = 1
self.kind_ = x
def clear_kind(self):
if self.has_kind_:
self.has_kind_ = 0
self.kind_ = ""
def has_kind(self): return self.has_kind_
def ancestor(self):
if self.ancestor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.ancestor_ is None: self.ancestor_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.ancestor_
def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
def clear_ancestor(self):
if self.has_ancestor_:
self.has_ancestor_ = 0;
if self.ancestor_ is not None: self.ancestor_.Clear()
def has_ancestor(self): return self.has_ancestor_
def filter_size(self): return len(self.filter_)
def filter_list(self): return self.filter_
def filter(self, i):
return self.filter_[i]
def mutable_filter(self, i):
return self.filter_[i]
def add_filter(self):
x = Query_Filter()
self.filter_.append(x)
return x
def clear_filter(self):
self.filter_ = []
def search_query(self): return self.search_query_
def set_search_query(self, x):
self.has_search_query_ = 1
self.search_query_ = x
def clear_search_query(self):
if self.has_search_query_:
self.has_search_query_ = 0
self.search_query_ = ""
def has_search_query(self): return self.has_search_query_
def order_size(self): return len(self.order_)
def order_list(self): return self.order_
def order(self, i):
return self.order_[i]
def mutable_order(self, i):
return self.order_[i]
def add_order(self):
x = Query_Order()
self.order_.append(x)
return x
def clear_order(self):
self.order_ = []
def hint(self): return self.hint_
def set_hint(self, x):
self.has_hint_ = 1
self.hint_ = x
def clear_hint(self):
if self.has_hint_:
self.has_hint_ = 0
self.hint_ = 0
def has_hint(self): return self.has_hint_
def count(self): return self.count_
def set_count(self, x):
self.has_count_ = 1
self.count_ = x
def clear_count(self):
if self.has_count_:
self.has_count_ = 0
self.count_ = 0
def has_count(self): return self.has_count_
def offset(self): return self.offset_
def set_offset(self, x):
self.has_offset_ = 1
self.offset_ = x
def clear_offset(self):
if self.has_offset_:
self.has_offset_ = 0
self.offset_ = 0
def has_offset(self): return self.has_offset_
def limit(self): return self.limit_
def set_limit(self, x):
self.has_limit_ = 1
self.limit_ = x
def clear_limit(self):
if self.has_limit_:
self.has_limit_ = 0
self.limit_ = 0
def has_limit(self): return self.has_limit_
def compiled_cursor(self):
if self.compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.compiled_cursor_
def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
def clear_compiled_cursor(self):
if self.has_compiled_cursor_:
self.has_compiled_cursor_ = 0;
if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
def has_compiled_cursor(self): return self.has_compiled_cursor_
def end_compiled_cursor(self):
if self.end_compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.end_compiled_cursor_ is None: self.end_compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.end_compiled_cursor_
def mutable_end_compiled_cursor(self): self.has_end_compiled_cursor_ = 1; return self.end_compiled_cursor()
def clear_end_compiled_cursor(self):
if self.has_end_compiled_cursor_:
self.has_end_compiled_cursor_ = 0;
if self.end_compiled_cursor_ is not None: self.end_compiled_cursor_.Clear()
def has_end_compiled_cursor(self): return self.has_end_compiled_cursor_
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def require_perfect_plan(self): return self.require_perfect_plan_
def set_require_perfect_plan(self, x):
self.has_require_perfect_plan_ = 1
self.require_perfect_plan_ = x
def clear_require_perfect_plan(self):
if self.has_require_perfect_plan_:
self.has_require_perfect_plan_ = 0
self.require_perfect_plan_ = 0
def has_require_perfect_plan(self): return self.has_require_perfect_plan_
def keys_only(self): return self.keys_only_
def set_keys_only(self, x):
self.has_keys_only_ = 1
self.keys_only_ = x
def clear_keys_only(self):
if self.has_keys_only_:
self.has_keys_only_ = 0
self.keys_only_ = 0
def has_keys_only(self): return self.has_keys_only_
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def compile(self): return self.compile_
def set_compile(self, x):
self.has_compile_ = 1
self.compile_ = x
def clear_compile(self):
if self.has_compile_:
self.has_compile_ = 0
self.compile_ = 0
def has_compile(self): return self.has_compile_
def failover_ms(self): return self.failover_ms_
def set_failover_ms(self, x):
self.has_failover_ms_ = 1
self.failover_ms_ = x
def clear_failover_ms(self):
if self.has_failover_ms_:
self.has_failover_ms_ = 0
self.failover_ms_ = 0
def has_failover_ms(self): return self.has_failover_ms_
def strong(self): return self.strong_
def set_strong(self, x):
self.has_strong_ = 1
self.strong_ = x
def clear_strong(self):
if self.has_strong_:
self.has_strong_ = 0
self.strong_ = 0
def has_strong(self): return self.has_strong_
def property_name_size(self): return len(self.property_name_)
def property_name_list(self): return self.property_name_
def property_name(self, i):
return self.property_name_[i]
def set_property_name(self, i, x):
self.property_name_[i] = x
def add_property_name(self, x):
self.property_name_.append(x)
def clear_property_name(self):
self.property_name_ = []
def group_by_property_name_size(self): return len(self.group_by_property_name_)
def group_by_property_name_list(self): return self.group_by_property_name_
def group_by_property_name(self, i):
return self.group_by_property_name_[i]
def set_group_by_property_name(self, i, x):
self.group_by_property_name_[i] = x
def add_group_by_property_name(self, x):
self.group_by_property_name_.append(x)
def clear_group_by_property_name(self):
self.group_by_property_name_ = []
def distinct(self): return self.distinct_
def set_distinct(self, x):
self.has_distinct_ = 1
self.distinct_ = x
def clear_distinct(self):
if self.has_distinct_:
self.has_distinct_ = 0
self.distinct_ = 0
def has_distinct(self): return self.has_distinct_
def min_safe_time_seconds(self): return self.min_safe_time_seconds_
def set_min_safe_time_seconds(self, x):
self.has_min_safe_time_seconds_ = 1
self.min_safe_time_seconds_ = x
def clear_min_safe_time_seconds(self):
if self.has_min_safe_time_seconds_:
self.has_min_safe_time_seconds_ = 0
self.min_safe_time_seconds_ = 0
def has_min_safe_time_seconds(self): return self.has_min_safe_time_seconds_
def safe_replica_name_size(self): return len(self.safe_replica_name_)
def safe_replica_name_list(self): return self.safe_replica_name_
def safe_replica_name(self, i):
return self.safe_replica_name_[i]
def set_safe_replica_name(self, i, x):
self.safe_replica_name_[i] = x
def add_safe_replica_name(self, x):
self.safe_replica_name_.append(x)
def clear_safe_replica_name(self):
self.safe_replica_name_ = []
def persist_offset(self): return self.persist_offset_
def set_persist_offset(self, x):
self.has_persist_offset_ = 1
self.persist_offset_ = x
def clear_persist_offset(self):
if self.has_persist_offset_:
self.has_persist_offset_ = 0
self.persist_offset_ = 1
def has_persist_offset(self): return self.has_persist_offset_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_app()): self.set_app(x.app())
if (x.has_name_space()): self.set_name_space(x.name_space())
if (x.has_kind()): self.set_kind(x.kind())
if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i))
if (x.has_search_query()): self.set_search_query(x.search_query())
for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
if (x.has_hint()): self.set_hint(x.hint())
if (x.has_count()): self.set_count(x.count())
if (x.has_offset()): self.set_offset(x.offset())
if (x.has_limit()): self.set_limit(x.limit())
if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
if (x.has_end_compiled_cursor()): self.mutable_end_compiled_cursor().MergeFrom(x.end_compiled_cursor())
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
if (x.has_keys_only()): self.set_keys_only(x.keys_only())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
if (x.has_compile()): self.set_compile(x.compile())
if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
if (x.has_strong()): self.set_strong(x.strong())
for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
for i in xrange(x.group_by_property_name_size()): self.add_group_by_property_name(x.group_by_property_name(i))
if (x.has_distinct()): self.set_distinct(x.distinct())
if (x.has_min_safe_time_seconds()): self.set_min_safe_time_seconds(x.min_safe_time_seconds())
for i in xrange(x.safe_replica_name_size()): self.add_safe_replica_name(x.safe_replica_name(i))
if (x.has_persist_offset()): self.set_persist_offset(x.persist_offset())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
if self.has_name_space_ != x.has_name_space_: return 0
if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
if self.has_kind_ != x.has_kind_: return 0
if self.has_kind_ and self.kind_ != x.kind_: return 0
if self.has_ancestor_ != x.has_ancestor_: return 0
if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
if len(self.filter_) != len(x.filter_): return 0
for e1, e2 in zip(self.filter_, x.filter_):
if e1 != e2: return 0
if self.has_search_query_ != x.has_search_query_: return 0
if self.has_search_query_ and self.search_query_ != x.search_query_: return 0
if len(self.order_) != len(x.order_): return 0
for e1, e2 in zip(self.order_, x.order_):
if e1 != e2: return 0
if self.has_hint_ != x.has_hint_: return 0
if self.has_hint_ and self.hint_ != x.hint_: return 0
if self.has_count_ != x.has_count_: return 0
if self.has_count_ and self.count_ != x.count_: return 0
if self.has_offset_ != x.has_offset_: return 0
if self.has_offset_ and self.offset_ != x.offset_: return 0
if self.has_limit_ != x.has_limit_: return 0
if self.has_limit_ and self.limit_ != x.limit_: return 0
if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
if self.has_end_compiled_cursor_ != x.has_end_compiled_cursor_: return 0
if self.has_end_compiled_cursor_ and self.end_compiled_cursor_ != x.end_compiled_cursor_: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
if self.has_keys_only_ != x.has_keys_only_: return 0
if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if self.has_compile_ != x.has_compile_: return 0
if self.has_compile_ and self.compile_ != x.compile_: return 0
if self.has_failover_ms_ != x.has_failover_ms_: return 0
if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
if self.has_strong_ != x.has_strong_: return 0
if self.has_strong_ and self.strong_ != x.strong_: return 0
if len(self.property_name_) != len(x.property_name_): return 0
for e1, e2 in zip(self.property_name_, x.property_name_):
if e1 != e2: return 0
if len(self.group_by_property_name_) != len(x.group_by_property_name_): return 0
for e1, e2 in zip(self.group_by_property_name_, x.group_by_property_name_):
if e1 != e2: return 0
if self.has_distinct_ != x.has_distinct_: return 0
if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
if self.has_min_safe_time_seconds_ != x.has_min_safe_time_seconds_: return 0
if self.has_min_safe_time_seconds_ and self.min_safe_time_seconds_ != x.min_safe_time_seconds_: return 0
if len(self.safe_replica_name_) != len(x.safe_replica_name_): return 0
for e1, e2 in zip(self.safe_replica_name_, x.safe_replica_name_):
if e1 != e2: return 0
if self.has_persist_offset_ != x.has_persist_offset_: return 0
if self.has_persist_offset_ and self.persist_offset_ != x.persist_offset_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_app_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app not set.')
if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
for p in self.filter_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.order_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
if (self.has_end_compiled_cursor_ and not self.end_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(len(self.app_))
if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
n += 2 * len(self.filter_)
for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize()
if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
n += 2 * len(self.order_)
for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSize())
if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSize())
n += 2 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_require_perfect_plan_): n += 3
if (self.has_keys_only_): n += 3
if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize())
if (self.has_compile_): n += 3
if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 3
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
n += 2 * len(self.group_by_property_name_)
for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
if (self.has_distinct_): n += 3
if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
n += 2 * len(self.safe_replica_name_)
for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
if (self.has_persist_offset_): n += 3
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_app_):
n += 1
n += self.lengthString(len(self.app_))
if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
n += 2 * len(self.filter_)
for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSizePartial()
if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
n += 2 * len(self.order_)
for i in xrange(len(self.order_)): n += self.order_[i].ByteSizePartial()
if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSizePartial())
n += 2 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_require_perfect_plan_): n += 3
if (self.has_keys_only_): n += 3
if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSizePartial())
if (self.has_compile_): n += 3
if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 3
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
n += 2 * len(self.group_by_property_name_)
for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
if (self.has_distinct_): n += 3
if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
n += 2 * len(self.safe_replica_name_)
for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
if (self.has_persist_offset_): n += 3
return n
def Clear(self):
self.clear_header()
self.clear_app()
self.clear_name_space()
self.clear_kind()
self.clear_ancestor()
self.clear_filter()
self.clear_search_query()
self.clear_order()
self.clear_hint()
self.clear_count()
self.clear_offset()
self.clear_limit()
self.clear_compiled_cursor()
self.clear_end_compiled_cursor()
self.clear_composite_index()
self.clear_require_perfect_plan()
self.clear_keys_only()
self.clear_transaction()
self.clear_compile()
self.clear_failover_ms()
self.clear_strong()
self.clear_property_name()
self.clear_group_by_property_name()
self.clear_distinct()
self.clear_min_safe_time_seconds()
self.clear_safe_replica_name()
self.clear_persist_offset()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_kind_):
out.putVarInt32(26)
out.putPrefixedString(self.kind_)
for i in xrange(len(self.filter_)):
out.putVarInt32(35)
self.filter_[i].OutputUnchecked(out)
out.putVarInt32(36)
if (self.has_search_query_):
out.putVarInt32(66)
out.putPrefixedString(self.search_query_)
for i in xrange(len(self.order_)):
out.putVarInt32(75)
self.order_[i].OutputUnchecked(out)
out.putVarInt32(76)
if (self.has_offset_):
out.putVarInt32(96)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(128)
out.putVarInt32(self.limit_)
if (self.has_ancestor_):
out.putVarInt32(138)
out.putVarInt32(self.ancestor_.ByteSize())
self.ancestor_.OutputUnchecked(out)
if (self.has_hint_):
out.putVarInt32(144)
out.putVarInt32(self.hint_)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(154)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
if (self.has_require_perfect_plan_):
out.putVarInt32(160)
out.putBoolean(self.require_perfect_plan_)
if (self.has_keys_only_):
out.putVarInt32(168)
out.putBoolean(self.keys_only_)
if (self.has_transaction_):
out.putVarInt32(178)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
if (self.has_count_):
out.putVarInt32(184)
out.putVarInt32(self.count_)
if (self.has_distinct_):
out.putVarInt32(192)
out.putBoolean(self.distinct_)
if (self.has_compile_):
out.putVarInt32(200)
out.putBoolean(self.compile_)
if (self.has_failover_ms_):
out.putVarInt32(208)
out.putVarInt64(self.failover_ms_)
if (self.has_name_space_):
out.putVarInt32(234)
out.putPrefixedString(self.name_space_)
if (self.has_compiled_cursor_):
out.putVarInt32(242)
out.putVarInt32(self.compiled_cursor_.ByteSize())
self.compiled_cursor_.OutputUnchecked(out)
if (self.has_end_compiled_cursor_):
out.putVarInt32(250)
out.putVarInt32(self.end_compiled_cursor_.ByteSize())
self.end_compiled_cursor_.OutputUnchecked(out)
if (self.has_strong_):
out.putVarInt32(256)
out.putBoolean(self.strong_)
for i in xrange(len(self.property_name_)):
out.putVarInt32(266)
out.putPrefixedString(self.property_name_[i])
for i in xrange(len(self.group_by_property_name_)):
out.putVarInt32(274)
out.putPrefixedString(self.group_by_property_name_[i])
if (self.has_min_safe_time_seconds_):
out.putVarInt32(280)
out.putVarInt64(self.min_safe_time_seconds_)
for i in xrange(len(self.safe_replica_name_)):
out.putVarInt32(290)
out.putPrefixedString(self.safe_replica_name_[i])
if (self.has_persist_offset_):
out.putVarInt32(296)
out.putBoolean(self.persist_offset_)
if (self.has_header_):
out.putVarInt32(314)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_app_):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_kind_):
out.putVarInt32(26)
out.putPrefixedString(self.kind_)
for i in xrange(len(self.filter_)):
out.putVarInt32(35)
self.filter_[i].OutputPartial(out)
out.putVarInt32(36)
if (self.has_search_query_):
out.putVarInt32(66)
out.putPrefixedString(self.search_query_)
for i in xrange(len(self.order_)):
out.putVarInt32(75)
self.order_[i].OutputPartial(out)
out.putVarInt32(76)
if (self.has_offset_):
out.putVarInt32(96)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(128)
out.putVarInt32(self.limit_)
if (self.has_ancestor_):
out.putVarInt32(138)
out.putVarInt32(self.ancestor_.ByteSizePartial())
self.ancestor_.OutputPartial(out)
if (self.has_hint_):
out.putVarInt32(144)
out.putVarInt32(self.hint_)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(154)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
if (self.has_require_perfect_plan_):
out.putVarInt32(160)
out.putBoolean(self.require_perfect_plan_)
if (self.has_keys_only_):
out.putVarInt32(168)
out.putBoolean(self.keys_only_)
if (self.has_transaction_):
out.putVarInt32(178)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
if (self.has_count_):
out.putVarInt32(184)
out.putVarInt32(self.count_)
if (self.has_distinct_):
out.putVarInt32(192)
out.putBoolean(self.distinct_)
if (self.has_compile_):
out.putVarInt32(200)
out.putBoolean(self.compile_)
if (self.has_failover_ms_):
out.putVarInt32(208)
out.putVarInt64(self.failover_ms_)
if (self.has_name_space_):
out.putVarInt32(234)
out.putPrefixedString(self.name_space_)
if (self.has_compiled_cursor_):
out.putVarInt32(242)
out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
self.compiled_cursor_.OutputPartial(out)
if (self.has_end_compiled_cursor_):
out.putVarInt32(250)
out.putVarInt32(self.end_compiled_cursor_.ByteSizePartial())
self.end_compiled_cursor_.OutputPartial(out)
if (self.has_strong_):
out.putVarInt32(256)
out.putBoolean(self.strong_)
for i in xrange(len(self.property_name_)):
out.putVarInt32(266)
out.putPrefixedString(self.property_name_[i])
for i in xrange(len(self.group_by_property_name_)):
out.putVarInt32(274)
out.putPrefixedString(self.group_by_property_name_[i])
if (self.has_min_safe_time_seconds_):
out.putVarInt32(280)
out.putVarInt64(self.min_safe_time_seconds_)
for i in xrange(len(self.safe_replica_name_)):
out.putVarInt32(290)
out.putPrefixedString(self.safe_replica_name_[i])
if (self.has_persist_offset_):
out.putVarInt32(296)
out.putBoolean(self.persist_offset_)
if (self.has_header_):
out.putVarInt32(314)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app(d.getPrefixedString())
continue
if tt == 26:
self.set_kind(d.getPrefixedString())
continue
if tt == 35:
self.add_filter().TryMerge(d)
continue
if tt == 66:
self.set_search_query(d.getPrefixedString())
continue
if tt == 75:
self.add_order().TryMerge(d)
continue
if tt == 96:
self.set_offset(d.getVarInt32())
continue
if tt == 128:
self.set_limit(d.getVarInt32())
continue
if tt == 138:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_ancestor().TryMerge(tmp)
continue
if tt == 144:
self.set_hint(d.getVarInt32())
continue
if tt == 154:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_composite_index().TryMerge(tmp)
continue
if tt == 160:
self.set_require_perfect_plan(d.getBoolean())
continue
if tt == 168:
self.set_keys_only(d.getBoolean())
continue
if tt == 178:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 184:
self.set_count(d.getVarInt32())
continue
if tt == 192:
self.set_distinct(d.getBoolean())
continue
if tt == 200:
self.set_compile(d.getBoolean())
continue
if tt == 208:
self.set_failover_ms(d.getVarInt64())
continue
if tt == 234:
self.set_name_space(d.getPrefixedString())
continue
if tt == 242:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_compiled_cursor().TryMerge(tmp)
continue
if tt == 250:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_end_compiled_cursor().TryMerge(tmp)
continue
if tt == 256:
self.set_strong(d.getBoolean())
continue
if tt == 266:
self.add_property_name(d.getPrefixedString())
continue
if tt == 274:
self.add_group_by_property_name(d.getPrefixedString())
continue
if tt == 280:
self.set_min_safe_time_seconds(d.getVarInt64())
continue
if tt == 290:
self.add_safe_replica_name(d.getPrefixedString())
continue
if tt == 296:
self.set_persist_offset(d.getBoolean())
continue
if tt == 314:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
if self.has_ancestor_:
res+=prefix+"ancestor <\n"
res+=self.ancestor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.filter_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Filter%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_))
cnt=0
for e in self.order_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Order%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
if self.has_compiled_cursor_:
res+=prefix+"compiled_cursor <\n"
res+=self.compiled_cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_end_compiled_cursor_:
res+=prefix+"end_compiled_cursor <\n"
res+=self.end_compiled_cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.composite_index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("composite_index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
cnt=0
for e in self.property_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
cnt=0
for e in self.group_by_property_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("group_by_property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
if self.has_min_safe_time_seconds_: res+=prefix+("min_safe_time_seconds: %s\n" % self.DebugFormatInt64(self.min_safe_time_seconds_))
cnt=0
for e in self.safe_replica_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("safe_replica_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_persist_offset_: res+=prefix+("persist_offset: %s\n" % self.DebugFormatBool(self.persist_offset_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
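  # Field-number constants (k*) and the _TEXT/_TYPES lookup tables below map
  # tag numbers to human-readable names and wire types; the ProtocolMessage
  # base class appears to use them for text-format debug output and generic
  # decoding, so they mirror the tag numbers used in OutputUnchecked and
  # TryMerge above.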
kheader = 39
kapp = 1
kname_space = 29
kkind = 3
kancestor = 17
kFilterGroup = 4
kFilterop = 6
kFilterproperty = 14
ksearch_query = 8
kOrderGroup = 9
kOrderproperty = 10
kOrderdirection = 11
khint = 18
kcount = 23
koffset = 12
klimit = 16
kcompiled_cursor = 30
kend_compiled_cursor = 31
kcomposite_index = 19
krequire_perfect_plan = 20
kkeys_only = 21
ktransaction = 22
kcompile = 25
kfailover_ms = 26
kstrong = 32
kproperty_name = 33
kgroup_by_property_name = 34
kdistinct = 24
kmin_safe_time_seconds = 35
ksafe_replica_name = 36
kpersist_offset = 37
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app",
3: "kind",
4: "Filter",
6: "op",
8: "search_query",
9: "Order",
10: "property",
11: "direction",
12: "offset",
14: "property",
16: "limit",
17: "ancestor",
18: "hint",
19: "composite_index",
20: "require_perfect_plan",
21: "keys_only",
22: "transaction",
23: "count",
24: "distinct",
25: "compile",
26: "failover_ms",
29: "name_space",
30: "compiled_cursor",
31: "end_compiled_cursor",
32: "strong",
33: "property_name",
34: "group_by_property_name",
35: "min_safe_time_seconds",
36: "safe_replica_name",
37: "persist_offset",
39: "header",
}, 39)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STARTGROUP,
6: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STARTGROUP,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.NUMERIC,
12: ProtocolBuffer.Encoder.NUMERIC,
14: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.NUMERIC,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.NUMERIC,
19: ProtocolBuffer.Encoder.STRING,
20: ProtocolBuffer.Encoder.NUMERIC,
21: ProtocolBuffer.Encoder.NUMERIC,
22: ProtocolBuffer.Encoder.STRING,
23: ProtocolBuffer.Encoder.NUMERIC,
24: ProtocolBuffer.Encoder.NUMERIC,
25: ProtocolBuffer.Encoder.NUMERIC,
26: ProtocolBuffer.Encoder.NUMERIC,
29: ProtocolBuffer.Encoder.STRING,
30: ProtocolBuffer.Encoder.STRING,
31: ProtocolBuffer.Encoder.STRING,
32: ProtocolBuffer.Encoder.NUMERIC,
33: ProtocolBuffer.Encoder.STRING,
34: ProtocolBuffer.Encoder.STRING,
35: ProtocolBuffer.Encoder.NUMERIC,
36: ProtocolBuffer.Encoder.STRING,
37: ProtocolBuffer.Encoder.NUMERIC,
39: ProtocolBuffer.Encoder.STRING,
}, 39, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Query'
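
# Illustrative sketch (hand-written annotation, not generator output): a
# minimal example of assembling a Query with the generated accessors above.
# The accessor names (set_app, set_kind, set_keys_only, set_limit, add_order,
# set_property, set_direction) come straight from the Query and Query_Order
# classes in this module; the literal values are made up for illustration.
def _example_build_query():
  """Builds a keys-only Query for kind 'Book', sorted by 'title' ascending."""
  q = Query()
  q.set_app('example-app')   # required; IsInitialized() reports it if missing
  q.set_kind('Book')
  q.set_keys_only(1)
  q.set_limit(20)
  order = q.add_order()      # appends a Query_Order group entry
  order.set_property('title')
  order.set_direction(Query_Order.ASCENDING)
  # Round-tripping through the wire format works the same way as in __init__:
  # Query(contents=serialized_bytes) parses via MergeFromString.
  return q

# CompiledQuery_PrimaryScan: the main index scan of a compiled query: an
# optional index_name plus start/end keys with inclusive flags, optional
# start/end postfix values, and an optional end_unapplied_log_timestamp_us
# bound.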
class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
has_index_name_ = 0
index_name_ = ""
has_start_key_ = 0
start_key_ = ""
has_start_inclusive_ = 0
start_inclusive_ = 0
has_end_key_ = 0
end_key_ = ""
has_end_inclusive_ = 0
end_inclusive_ = 0
has_end_unapplied_log_timestamp_us_ = 0
end_unapplied_log_timestamp_us_ = 0
def __init__(self, contents=None):
self.start_postfix_value_ = []
self.end_postfix_value_ = []
if contents is not None: self.MergeFromString(contents)
def index_name(self): return self.index_name_
def set_index_name(self, x):
self.has_index_name_ = 1
self.index_name_ = x
def clear_index_name(self):
if self.has_index_name_:
self.has_index_name_ = 0
self.index_name_ = ""
def has_index_name(self): return self.has_index_name_
def start_key(self): return self.start_key_
def set_start_key(self, x):
self.has_start_key_ = 1
self.start_key_ = x
def clear_start_key(self):
if self.has_start_key_:
self.has_start_key_ = 0
self.start_key_ = ""
def has_start_key(self): return self.has_start_key_
def start_inclusive(self): return self.start_inclusive_
def set_start_inclusive(self, x):
self.has_start_inclusive_ = 1
self.start_inclusive_ = x
def clear_start_inclusive(self):
if self.has_start_inclusive_:
self.has_start_inclusive_ = 0
self.start_inclusive_ = 0
def has_start_inclusive(self): return self.has_start_inclusive_
def end_key(self): return self.end_key_
def set_end_key(self, x):
self.has_end_key_ = 1
self.end_key_ = x
def clear_end_key(self):
if self.has_end_key_:
self.has_end_key_ = 0
self.end_key_ = ""
def has_end_key(self): return self.has_end_key_
def end_inclusive(self): return self.end_inclusive_
def set_end_inclusive(self, x):
self.has_end_inclusive_ = 1
self.end_inclusive_ = x
def clear_end_inclusive(self):
if self.has_end_inclusive_:
self.has_end_inclusive_ = 0
self.end_inclusive_ = 0
def has_end_inclusive(self): return self.has_end_inclusive_
def start_postfix_value_size(self): return len(self.start_postfix_value_)
def start_postfix_value_list(self): return self.start_postfix_value_
def start_postfix_value(self, i):
return self.start_postfix_value_[i]
def set_start_postfix_value(self, i, x):
self.start_postfix_value_[i] = x
def add_start_postfix_value(self, x):
self.start_postfix_value_.append(x)
def clear_start_postfix_value(self):
self.start_postfix_value_ = []
def end_postfix_value_size(self): return len(self.end_postfix_value_)
def end_postfix_value_list(self): return self.end_postfix_value_
def end_postfix_value(self, i):
return self.end_postfix_value_[i]
def set_end_postfix_value(self, i, x):
self.end_postfix_value_[i] = x
def add_end_postfix_value(self, x):
self.end_postfix_value_.append(x)
def clear_end_postfix_value(self):
self.end_postfix_value_ = []
def end_unapplied_log_timestamp_us(self): return self.end_unapplied_log_timestamp_us_
def set_end_unapplied_log_timestamp_us(self, x):
self.has_end_unapplied_log_timestamp_us_ = 1
self.end_unapplied_log_timestamp_us_ = x
def clear_end_unapplied_log_timestamp_us(self):
if self.has_end_unapplied_log_timestamp_us_:
self.has_end_unapplied_log_timestamp_us_ = 0
self.end_unapplied_log_timestamp_us_ = 0
def has_end_unapplied_log_timestamp_us(self): return self.has_end_unapplied_log_timestamp_us_
def MergeFrom(self, x):
assert x is not self
if (x.has_index_name()): self.set_index_name(x.index_name())
if (x.has_start_key()): self.set_start_key(x.start_key())
if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
if (x.has_end_key()): self.set_end_key(x.end_key())
if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive())
for i in xrange(x.start_postfix_value_size()): self.add_start_postfix_value(x.start_postfix_value(i))
for i in xrange(x.end_postfix_value_size()): self.add_end_postfix_value(x.end_postfix_value(i))
if (x.has_end_unapplied_log_timestamp_us()): self.set_end_unapplied_log_timestamp_us(x.end_unapplied_log_timestamp_us())
def Equals(self, x):
if x is self: return 1
if self.has_index_name_ != x.has_index_name_: return 0
if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
if self.has_start_key_ != x.has_start_key_: return 0
if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
if self.has_end_key_ != x.has_end_key_: return 0
if self.has_end_key_ and self.end_key_ != x.end_key_: return 0
if self.has_end_inclusive_ != x.has_end_inclusive_: return 0
if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0
if len(self.start_postfix_value_) != len(x.start_postfix_value_): return 0
for e1, e2 in zip(self.start_postfix_value_, x.start_postfix_value_):
if e1 != e2: return 0
if len(self.end_postfix_value_) != len(x.end_postfix_value_): return 0
for e1, e2 in zip(self.end_postfix_value_, x.end_postfix_value_):
if e1 != e2: return 0
if self.has_end_unapplied_log_timestamp_us_ != x.has_end_unapplied_log_timestamp_us_: return 0
if self.has_end_unapplied_log_timestamp_us_ and self.end_unapplied_log_timestamp_us_ != x.end_unapplied_log_timestamp_us_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
if (self.has_start_inclusive_): n += 2
if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
if (self.has_end_inclusive_): n += 2
n += 2 * len(self.start_postfix_value_)
for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
n += 2 * len(self.end_postfix_value_)
for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
if (self.has_start_inclusive_): n += 2
if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
if (self.has_end_inclusive_): n += 2
n += 2 * len(self.start_postfix_value_)
for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
n += 2 * len(self.end_postfix_value_)
for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
return n
def Clear(self):
self.clear_index_name()
self.clear_start_key()
self.clear_start_inclusive()
self.clear_end_key()
self.clear_end_inclusive()
self.clear_start_postfix_value()
self.clear_end_postfix_value()
self.clear_end_unapplied_log_timestamp_us()
def OutputUnchecked(self, out):
if (self.has_index_name_):
out.putVarInt32(18)
out.putPrefixedString(self.index_name_)
if (self.has_start_key_):
out.putVarInt32(26)
out.putPrefixedString(self.start_key_)
if (self.has_start_inclusive_):
out.putVarInt32(32)
out.putBoolean(self.start_inclusive_)
if (self.has_end_key_):
out.putVarInt32(42)
out.putPrefixedString(self.end_key_)
if (self.has_end_inclusive_):
out.putVarInt32(48)
out.putBoolean(self.end_inclusive_)
if (self.has_end_unapplied_log_timestamp_us_):
out.putVarInt32(152)
out.putVarInt64(self.end_unapplied_log_timestamp_us_)
for i in xrange(len(self.start_postfix_value_)):
out.putVarInt32(178)
out.putPrefixedString(self.start_postfix_value_[i])
for i in xrange(len(self.end_postfix_value_)):
out.putVarInt32(186)
out.putPrefixedString(self.end_postfix_value_[i])
def OutputPartial(self, out):
if (self.has_index_name_):
out.putVarInt32(18)
out.putPrefixedString(self.index_name_)
if (self.has_start_key_):
out.putVarInt32(26)
out.putPrefixedString(self.start_key_)
if (self.has_start_inclusive_):
out.putVarInt32(32)
out.putBoolean(self.start_inclusive_)
if (self.has_end_key_):
out.putVarInt32(42)
out.putPrefixedString(self.end_key_)
if (self.has_end_inclusive_):
out.putVarInt32(48)
out.putBoolean(self.end_inclusive_)
if (self.has_end_unapplied_log_timestamp_us_):
out.putVarInt32(152)
out.putVarInt64(self.end_unapplied_log_timestamp_us_)
for i in xrange(len(self.start_postfix_value_)):
out.putVarInt32(178)
out.putPrefixedString(self.start_postfix_value_[i])
for i in xrange(len(self.end_postfix_value_)):
out.putVarInt32(186)
out.putPrefixedString(self.end_postfix_value_[i])
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
self.set_index_name(d.getPrefixedString())
continue
if tt == 26:
self.set_start_key(d.getPrefixedString())
continue
if tt == 32:
self.set_start_inclusive(d.getBoolean())
continue
if tt == 42:
self.set_end_key(d.getPrefixedString())
continue
if tt == 48:
self.set_end_inclusive(d.getBoolean())
continue
if tt == 152:
self.set_end_unapplied_log_timestamp_us(d.getVarInt64())
continue
if tt == 178:
self.add_start_postfix_value(d.getPrefixedString())
continue
if tt == 186:
self.add_end_postfix_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_))
if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_))
cnt=0
for e in self.start_postfix_value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("start_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
cnt=0
for e in self.end_postfix_value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("end_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_end_unapplied_log_timestamp_us_: res+=prefix+("end_unapplied_log_timestamp_us: %s\n" % self.DebugFormatInt64(self.end_unapplied_log_timestamp_us_))
return res
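# CompiledQuery_MergeJoinScan: one secondary index scan in a merge-join plan.
# `index_name` (field 8) is required; `prefix_value` (field 9) is a repeated
# string field holding the scan's prefix values, and the optional boolean
# `value_prefix` (field 20) defaults to false.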
class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage):
has_index_name_ = 0
index_name_ = ""
has_value_prefix_ = 0
value_prefix_ = 0
def __init__(self, contents=None):
self.prefix_value_ = []
if contents is not None: self.MergeFromString(contents)
def index_name(self): return self.index_name_
def set_index_name(self, x):
self.has_index_name_ = 1
self.index_name_ = x
def clear_index_name(self):
if self.has_index_name_:
self.has_index_name_ = 0
self.index_name_ = ""
def has_index_name(self): return self.has_index_name_
def prefix_value_size(self): return len(self.prefix_value_)
def prefix_value_list(self): return self.prefix_value_
def prefix_value(self, i):
return self.prefix_value_[i]
def set_prefix_value(self, i, x):
self.prefix_value_[i] = x
def add_prefix_value(self, x):
self.prefix_value_.append(x)
def clear_prefix_value(self):
self.prefix_value_ = []
def value_prefix(self): return self.value_prefix_
def set_value_prefix(self, x):
self.has_value_prefix_ = 1
self.value_prefix_ = x
def clear_value_prefix(self):
if self.has_value_prefix_:
self.has_value_prefix_ = 0
self.value_prefix_ = 0
def has_value_prefix(self): return self.has_value_prefix_
def MergeFrom(self, x):
assert x is not self
if (x.has_index_name()): self.set_index_name(x.index_name())
for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i))
if (x.has_value_prefix()): self.set_value_prefix(x.value_prefix())
def Equals(self, x):
if x is self: return 1
if self.has_index_name_ != x.has_index_name_: return 0
if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
if len(self.prefix_value_) != len(x.prefix_value_): return 0
for e1, e2 in zip(self.prefix_value_, x.prefix_value_):
if e1 != e2: return 0
if self.has_value_prefix_ != x.has_value_prefix_: return 0
if self.has_value_prefix_ and self.value_prefix_ != x.value_prefix_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_index_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: index_name not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.index_name_))
n += 1 * len(self.prefix_value_)
for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
if (self.has_value_prefix_): n += 3
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_index_name_):
n += 1
n += self.lengthString(len(self.index_name_))
n += 1 * len(self.prefix_value_)
for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
if (self.has_value_prefix_): n += 3
return n
def Clear(self):
self.clear_index_name()
self.clear_prefix_value()
self.clear_value_prefix()
def OutputUnchecked(self, out):
out.putVarInt32(66)
out.putPrefixedString(self.index_name_)
for i in xrange(len(self.prefix_value_)):
out.putVarInt32(74)
out.putPrefixedString(self.prefix_value_[i])
if (self.has_value_prefix_):
out.putVarInt32(160)
out.putBoolean(self.value_prefix_)
def OutputPartial(self, out):
if (self.has_index_name_):
out.putVarInt32(66)
out.putPrefixedString(self.index_name_)
for i in xrange(len(self.prefix_value_)):
out.putVarInt32(74)
out.putPrefixedString(self.prefix_value_[i])
if (self.has_value_prefix_):
out.putVarInt32(160)
out.putBoolean(self.value_prefix_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 60: break
if tt == 66:
self.set_index_name(d.getPrefixedString())
continue
if tt == 74:
self.add_prefix_value(d.getPrefixedString())
continue
if tt == 160:
self.set_value_prefix(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
cnt=0
for e in self.prefix_value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("prefix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_value_prefix_: res+=prefix+("value_prefix: %s\n" % self.DebugFormatBool(self.value_prefix_))
return res
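# CompiledQuery_EntityFilter: entity-level filtering recorded in a compiled
# query: an optional `distinct` flag (field 14), an optional `kind`
# (field 17), and an optional `ancestor` Reference (field 18). With no
# required fields, IsInitialized only validates the nested ancestor when it
# is set.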
class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage):
has_distinct_ = 0
distinct_ = 0
has_kind_ = 0
kind_ = ""
has_ancestor_ = 0
ancestor_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def distinct(self): return self.distinct_
def set_distinct(self, x):
self.has_distinct_ = 1
self.distinct_ = x
def clear_distinct(self):
if self.has_distinct_:
self.has_distinct_ = 0
self.distinct_ = 0
def has_distinct(self): return self.has_distinct_
def kind(self): return self.kind_
def set_kind(self, x):
self.has_kind_ = 1
self.kind_ = x
def clear_kind(self):
if self.has_kind_:
self.has_kind_ = 0
self.kind_ = ""
def has_kind(self): return self.has_kind_
def ancestor(self):
if self.ancestor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.ancestor_ is None: self.ancestor_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.ancestor_
def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
def clear_ancestor(self):
if self.has_ancestor_:
self.has_ancestor_ = 0;
if self.ancestor_ is not None: self.ancestor_.Clear()
def has_ancestor(self): return self.has_ancestor_
def MergeFrom(self, x):
assert x is not self
if (x.has_distinct()): self.set_distinct(x.distinct())
if (x.has_kind()): self.set_kind(x.kind())
if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
def Equals(self, x):
if x is self: return 1
if self.has_distinct_ != x.has_distinct_: return 0
if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
if self.has_kind_ != x.has_kind_: return 0
if self.has_kind_ and self.kind_ != x.kind_: return 0
if self.has_ancestor_ != x.has_ancestor_: return 0
if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_distinct_): n += 2
if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_distinct_): n += 2
if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
return n
def Clear(self):
self.clear_distinct()
self.clear_kind()
self.clear_ancestor()
def OutputUnchecked(self, out):
if (self.has_distinct_):
out.putVarInt32(112)
out.putBoolean(self.distinct_)
if (self.has_kind_):
out.putVarInt32(138)
out.putPrefixedString(self.kind_)
if (self.has_ancestor_):
out.putVarInt32(146)
out.putVarInt32(self.ancestor_.ByteSize())
self.ancestor_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_distinct_):
out.putVarInt32(112)
out.putBoolean(self.distinct_)
if (self.has_kind_):
out.putVarInt32(138)
out.putPrefixedString(self.kind_)
if (self.has_ancestor_):
out.putVarInt32(146)
out.putVarInt32(self.ancestor_.ByteSizePartial())
self.ancestor_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 108: break
if tt == 112:
self.set_distinct(d.getBoolean())
continue
if tt == 138:
self.set_kind(d.getPrefixedString())
continue
if tt == 146:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_ancestor().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
if self.has_ancestor_:
res+=prefix+"ancestor <\n"
res+=self.ancestor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
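# CompiledQuery: the compiled representation of a query, presumably what the
# backend returns when a Query sets its `compile` flag. The PrimaryScan group
# and `keys_only` are required (see IsInitialized below); MergeJoinScan
# entries, an optional index definition, offset/limit, a property_name list,
# distinct_infix_size, an EntityFilter, and a plan_label are optional.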
class CompiledQuery(ProtocolBuffer.ProtocolMessage):
has_primaryscan_ = 0
has_index_def_ = 0
index_def_ = None
has_offset_ = 0
offset_ = 0
has_limit_ = 0
limit_ = 0
has_keys_only_ = 0
keys_only_ = 0
has_distinct_infix_size_ = 0
distinct_infix_size_ = 0
has_entityfilter_ = 0
entityfilter_ = None
has_plan_label_ = 0
plan_label_ = ""
def __init__(self, contents=None):
self.primaryscan_ = CompiledQuery_PrimaryScan()
self.mergejoinscan_ = []
self.property_name_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def primaryscan(self): return self.primaryscan_
def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_
def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear()
def has_primaryscan(self): return self.has_primaryscan_
def mergejoinscan_size(self): return len(self.mergejoinscan_)
def mergejoinscan_list(self): return self.mergejoinscan_
def mergejoinscan(self, i):
return self.mergejoinscan_[i]
def mutable_mergejoinscan(self, i):
return self.mergejoinscan_[i]
def add_mergejoinscan(self):
x = CompiledQuery_MergeJoinScan()
self.mergejoinscan_.append(x)
return x
def clear_mergejoinscan(self):
self.mergejoinscan_ = []
def index_def(self):
if self.index_def_ is None:
self.lazy_init_lock_.acquire()
try:
if self.index_def_ is None: self.index_def_ = Index()
finally:
self.lazy_init_lock_.release()
return self.index_def_
def mutable_index_def(self): self.has_index_def_ = 1; return self.index_def()
def clear_index_def(self):
if self.has_index_def_:
self.has_index_def_ = 0;
if self.index_def_ is not None: self.index_def_.Clear()
def has_index_def(self): return self.has_index_def_
def offset(self): return self.offset_
def set_offset(self, x):
self.has_offset_ = 1
self.offset_ = x
def clear_offset(self):
if self.has_offset_:
self.has_offset_ = 0
self.offset_ = 0
def has_offset(self): return self.has_offset_
def limit(self): return self.limit_
def set_limit(self, x):
self.has_limit_ = 1
self.limit_ = x
def clear_limit(self):
if self.has_limit_:
self.has_limit_ = 0
self.limit_ = 0
def has_limit(self): return self.has_limit_
def keys_only(self): return self.keys_only_
def set_keys_only(self, x):
self.has_keys_only_ = 1
self.keys_only_ = x
def clear_keys_only(self):
if self.has_keys_only_:
self.has_keys_only_ = 0
self.keys_only_ = 0
def has_keys_only(self): return self.has_keys_only_
def property_name_size(self): return len(self.property_name_)
def property_name_list(self): return self.property_name_
def property_name(self, i):
return self.property_name_[i]
def set_property_name(self, i, x):
self.property_name_[i] = x
def add_property_name(self, x):
self.property_name_.append(x)
def clear_property_name(self):
self.property_name_ = []
def distinct_infix_size(self): return self.distinct_infix_size_
def set_distinct_infix_size(self, x):
self.has_distinct_infix_size_ = 1
self.distinct_infix_size_ = x
def clear_distinct_infix_size(self):
if self.has_distinct_infix_size_:
self.has_distinct_infix_size_ = 0
self.distinct_infix_size_ = 0
def has_distinct_infix_size(self): return self.has_distinct_infix_size_
def entityfilter(self):
if self.entityfilter_ is None:
self.lazy_init_lock_.acquire()
try:
if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter()
finally:
self.lazy_init_lock_.release()
return self.entityfilter_
def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter()
def clear_entityfilter(self):
if self.has_entityfilter_:
self.has_entityfilter_ = 0;
if self.entityfilter_ is not None: self.entityfilter_.Clear()
def has_entityfilter(self): return self.has_entityfilter_
def plan_label(self): return self.plan_label_
def set_plan_label(self, x):
self.has_plan_label_ = 1
self.plan_label_ = x
def clear_plan_label(self):
if self.has_plan_label_:
self.has_plan_label_ = 0
self.plan_label_ = ""
def has_plan_label(self): return self.has_plan_label_
def MergeFrom(self, x):
assert x is not self
if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan())
for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i))
if (x.has_index_def()): self.mutable_index_def().MergeFrom(x.index_def())
if (x.has_offset()): self.set_offset(x.offset())
if (x.has_limit()): self.set_limit(x.limit())
if (x.has_keys_only()): self.set_keys_only(x.keys_only())
for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
if (x.has_distinct_infix_size()): self.set_distinct_infix_size(x.distinct_infix_size())
if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter())
if (x.has_plan_label()): self.set_plan_label(x.plan_label())
def Equals(self, x):
if x is self: return 1
if self.has_primaryscan_ != x.has_primaryscan_: return 0
if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0
if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0
for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_):
if e1 != e2: return 0
if self.has_index_def_ != x.has_index_def_: return 0
if self.has_index_def_ and self.index_def_ != x.index_def_: return 0
if self.has_offset_ != x.has_offset_: return 0
if self.has_offset_ and self.offset_ != x.offset_: return 0
if self.has_limit_ != x.has_limit_: return 0
if self.has_limit_ and self.limit_ != x.limit_: return 0
if self.has_keys_only_ != x.has_keys_only_: return 0
if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
if len(self.property_name_) != len(x.property_name_): return 0
for e1, e2 in zip(self.property_name_, x.property_name_):
if e1 != e2: return 0
if self.has_distinct_infix_size_ != x.has_distinct_infix_size_: return 0
if self.has_distinct_infix_size_ and self.distinct_infix_size_ != x.distinct_infix_size_: return 0
if self.has_entityfilter_ != x.has_entityfilter_: return 0
if self.has_entityfilter_ and self.entityfilter_ != x.entityfilter_: return 0
if self.has_plan_label_ != x.has_plan_label_: return 0
if self.has_plan_label_ and self.plan_label_ != x.plan_label_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_primaryscan_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: primaryscan not set.')
elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0
for p in self.mergejoinscan_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_index_def_ and not self.index_def_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_keys_only_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: keys_only not set.')
if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.primaryscan_.ByteSize()
n += 2 * len(self.mergejoinscan_)
for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize()
if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSize())
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize()
if (self.has_plan_label_): n += 2 + self.lengthString(len(self.plan_label_))
return n + 4
def ByteSizePartial(self):
n = 0
if (self.has_primaryscan_):
n += 2
n += self.primaryscan_.ByteSizePartial()
n += 2 * len(self.mergejoinscan_)
for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSizePartial()
if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSizePartial())
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
if (self.has_keys_only_):
n += 2
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSizePartial()
if (self.has_plan_label_): n += 2 + self.lengthString(len(self.plan_label_))
return n
def Clear(self):
self.clear_primaryscan()
self.clear_mergejoinscan()
self.clear_index_def()
self.clear_offset()
self.clear_limit()
self.clear_keys_only()
self.clear_property_name()
self.clear_distinct_infix_size()
self.clear_entityfilter()
self.clear_plan_label()
def OutputUnchecked(self, out):
out.putVarInt32(11)
self.primaryscan_.OutputUnchecked(out)
out.putVarInt32(12)
for i in xrange(len(self.mergejoinscan_)):
out.putVarInt32(59)
self.mergejoinscan_[i].OutputUnchecked(out)
out.putVarInt32(60)
if (self.has_offset_):
out.putVarInt32(80)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(88)
out.putVarInt32(self.limit_)
out.putVarInt32(96)
out.putBoolean(self.keys_only_)
if (self.has_entityfilter_):
out.putVarInt32(107)
self.entityfilter_.OutputUnchecked(out)
out.putVarInt32(108)
if (self.has_index_def_):
out.putVarInt32(170)
out.putVarInt32(self.index_def_.ByteSize())
self.index_def_.OutputUnchecked(out)
for i in xrange(len(self.property_name_)):
out.putVarInt32(194)
out.putPrefixedString(self.property_name_[i])
if (self.has_distinct_infix_size_):
out.putVarInt32(200)
out.putVarInt32(self.distinct_infix_size_)
if (self.has_plan_label_):
out.putVarInt32(210)
out.putPrefixedString(self.plan_label_)
def OutputPartial(self, out):
if (self.has_primaryscan_):
out.putVarInt32(11)
self.primaryscan_.OutputPartial(out)
out.putVarInt32(12)
for i in xrange(len(self.mergejoinscan_)):
out.putVarInt32(59)
self.mergejoinscan_[i].OutputPartial(out)
out.putVarInt32(60)
if (self.has_offset_):
out.putVarInt32(80)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(88)
out.putVarInt32(self.limit_)
if (self.has_keys_only_):
out.putVarInt32(96)
out.putBoolean(self.keys_only_)
if (self.has_entityfilter_):
out.putVarInt32(107)
self.entityfilter_.OutputPartial(out)
out.putVarInt32(108)
if (self.has_index_def_):
out.putVarInt32(170)
out.putVarInt32(self.index_def_.ByteSizePartial())
self.index_def_.OutputPartial(out)
for i in xrange(len(self.property_name_)):
out.putVarInt32(194)
out.putPrefixedString(self.property_name_[i])
if (self.has_distinct_infix_size_):
out.putVarInt32(200)
out.putVarInt32(self.distinct_infix_size_)
if (self.has_plan_label_):
out.putVarInt32(210)
out.putPrefixedString(self.plan_label_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.mutable_primaryscan().TryMerge(d)
continue
if tt == 59:
self.add_mergejoinscan().TryMerge(d)
continue
if tt == 80:
self.set_offset(d.getVarInt32())
continue
if tt == 88:
self.set_limit(d.getVarInt32())
continue
if tt == 96:
self.set_keys_only(d.getBoolean())
continue
if tt == 107:
self.mutable_entityfilter().TryMerge(d)
continue
if tt == 170:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_index_def().TryMerge(tmp)
continue
if tt == 194:
self.add_property_name(d.getPrefixedString())
continue
if tt == 200:
self.set_distinct_infix_size(d.getVarInt32())
continue
if tt == 210:
self.set_plan_label(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_primaryscan_:
res+=prefix+"PrimaryScan {\n"
res+=self.primaryscan_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt=0
for e in self.mergejoinscan_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("MergeJoinScan%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_index_def_:
res+=prefix+"index_def <\n"
res+=self.index_def_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
cnt=0
for e in self.property_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_distinct_infix_size_: res+=prefix+("distinct_infix_size: %s\n" % self.DebugFormatInt32(self.distinct_infix_size_))
if self.has_entityfilter_:
res+=prefix+"EntityFilter {\n"
res+=self.entityfilter_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_plan_label_: res+=prefix+("plan_label: %s\n" % self.DebugFormatString(self.plan_label_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kPrimaryScanGroup = 1
kPrimaryScanindex_name = 2
kPrimaryScanstart_key = 3
kPrimaryScanstart_inclusive = 4
kPrimaryScanend_key = 5
kPrimaryScanend_inclusive = 6
kPrimaryScanstart_postfix_value = 22
kPrimaryScanend_postfix_value = 23
kPrimaryScanend_unapplied_log_timestamp_us = 19
kMergeJoinScanGroup = 7
kMergeJoinScanindex_name = 8
kMergeJoinScanprefix_value = 9
kMergeJoinScanvalue_prefix = 20
kindex_def = 21
koffset = 10
klimit = 11
kkeys_only = 12
kproperty_name = 24
kdistinct_infix_size = 25
kEntityFilterGroup = 13
kEntityFilterdistinct = 14
kEntityFilterkind = 17
kEntityFilterancestor = 18
kplan_label = 26
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "PrimaryScan",
2: "index_name",
3: "start_key",
4: "start_inclusive",
5: "end_key",
6: "end_inclusive",
7: "MergeJoinScan",
8: "index_name",
9: "prefix_value",
10: "offset",
11: "limit",
12: "keys_only",
13: "EntityFilter",
14: "distinct",
17: "kind",
18: "ancestor",
19: "end_unapplied_log_timestamp_us",
20: "value_prefix",
21: "index_def",
22: "start_postfix_value",
23: "end_postfix_value",
24: "property_name",
25: "distinct_infix_size",
26: "plan_label",
}, 26)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.STARTGROUP,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.NUMERIC,
12: ProtocolBuffer.Encoder.NUMERIC,
13: ProtocolBuffer.Encoder.STARTGROUP,
14: ProtocolBuffer.Encoder.NUMERIC,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.STRING,
19: ProtocolBuffer.Encoder.NUMERIC,
20: ProtocolBuffer.Encoder.NUMERIC,
21: ProtocolBuffer.Encoder.STRING,
22: ProtocolBuffer.Encoder.STRING,
23: ProtocolBuffer.Encoder.STRING,
24: ProtocolBuffer.Encoder.STRING,
25: ProtocolBuffer.Encoder.NUMERIC,
26: ProtocolBuffer.Encoder.STRING,
}, 26, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledQuery'
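# (property, value) pair used by CompiledCursor_Position.indexvalue; the value
# field is required, the property name is optional.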
class CompiledCursor_PositionIndexValue(ProtocolBuffer.ProtocolMessage):
has_property_ = 0
property_ = ""
has_value_ = 0
def __init__(self, contents=None):
self.value_ = PropertyValue()
if contents is not None: self.MergeFromString(contents)
def property(self): return self.property_
def set_property(self, x):
self.has_property_ = 1
self.property_ = x
def clear_property(self):
if self.has_property_:
self.has_property_ = 0
self.property_ = ""
def has_property(self): return self.has_property_
def value(self): return self.value_
def mutable_value(self): self.has_value_ = 1; return self.value_
def clear_value(self):self.has_value_ = 0; self.value_.Clear()
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_property()): self.set_property(x.property())
if (x.has_value()): self.mutable_value().MergeFrom(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_property_ != x.has_property_: return 0
if self.has_property_ and self.property_ != x.property_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
elif not self.value_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
n += self.lengthString(self.value_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
if (self.has_value_):
n += 2
n += self.lengthString(self.value_.ByteSizePartial())
return n
def Clear(self):
self.clear_property()
self.clear_value()
def OutputUnchecked(self, out):
if (self.has_property_):
out.putVarInt32(242)
out.putPrefixedString(self.property_)
out.putVarInt32(250)
out.putVarInt32(self.value_.ByteSize())
self.value_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_property_):
out.putVarInt32(242)
out.putPrefixedString(self.property_)
if (self.has_value_):
out.putVarInt32(250)
out.putVarInt32(self.value_.ByteSizePartial())
self.value_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 236: break
if tt == 242:
self.set_property(d.getPrefixedString())
continue
if tt == 250:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_value().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
if self.has_value_:
res+=prefix+"value <\n"
res+=self.value_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
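# A position within query results: an opaque start_key or repeated IndexValue
# groups plus an optional entity key, together with the start_inclusive and
# before_ascending flags.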
class CompiledCursor_Position(ProtocolBuffer.ProtocolMessage):
has_start_key_ = 0
start_key_ = ""
has_key_ = 0
key_ = None
has_start_inclusive_ = 0
start_inclusive_ = 1
has_before_ascending_ = 0
before_ascending_ = 0
def __init__(self, contents=None):
self.indexvalue_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def start_key(self): return self.start_key_
def set_start_key(self, x):
self.has_start_key_ = 1
self.start_key_ = x
def clear_start_key(self):
if self.has_start_key_:
self.has_start_key_ = 0
self.start_key_ = ""
def has_start_key(self): return self.has_start_key_
def indexvalue_size(self): return len(self.indexvalue_)
def indexvalue_list(self): return self.indexvalue_
def indexvalue(self, i):
return self.indexvalue_[i]
def mutable_indexvalue(self, i):
return self.indexvalue_[i]
def add_indexvalue(self):
x = CompiledCursor_PositionIndexValue()
self.indexvalue_.append(x)
return x
def clear_indexvalue(self):
self.indexvalue_ = []
def key(self):
if self.key_ is None:
self.lazy_init_lock_.acquire()
try:
if self.key_ is None: self.key_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.key_
def mutable_key(self): self.has_key_ = 1; return self.key()
def clear_key(self):
if self.has_key_:
self.has_key_ = 0;
if self.key_ is not None: self.key_.Clear()
def has_key(self): return self.has_key_
def start_inclusive(self): return self.start_inclusive_
def set_start_inclusive(self, x):
self.has_start_inclusive_ = 1
self.start_inclusive_ = x
def clear_start_inclusive(self):
if self.has_start_inclusive_:
self.has_start_inclusive_ = 0
self.start_inclusive_ = 1
def has_start_inclusive(self): return self.has_start_inclusive_
def before_ascending(self): return self.before_ascending_
def set_before_ascending(self, x):
self.has_before_ascending_ = 1
self.before_ascending_ = x
def clear_before_ascending(self):
if self.has_before_ascending_:
self.has_before_ascending_ = 0
self.before_ascending_ = 0
def has_before_ascending(self): return self.has_before_ascending_
def MergeFrom(self, x):
assert x is not self
if (x.has_start_key()): self.set_start_key(x.start_key())
for i in xrange(x.indexvalue_size()): self.add_indexvalue().CopyFrom(x.indexvalue(i))
if (x.has_key()): self.mutable_key().MergeFrom(x.key())
if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
if (x.has_before_ascending()): self.set_before_ascending(x.before_ascending())
def Equals(self, x):
if x is self: return 1
if self.has_start_key_ != x.has_start_key_: return 0
if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
if len(self.indexvalue_) != len(x.indexvalue_): return 0
for e1, e2 in zip(self.indexvalue_, x.indexvalue_):
if e1 != e2: return 0
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
if self.has_before_ascending_ != x.has_before_ascending_: return 0
if self.has_before_ascending_ and self.before_ascending_ != x.before_ascending_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.indexvalue_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
n += 4 * len(self.indexvalue_)
for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSize()
if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSize())
if (self.has_start_inclusive_): n += 3
if (self.has_before_ascending_): n += 3
return n
def ByteSizePartial(self):
n = 0
if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
n += 4 * len(self.indexvalue_)
for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSizePartial()
if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSizePartial())
if (self.has_start_inclusive_): n += 3
if (self.has_before_ascending_): n += 3
return n
def Clear(self):
self.clear_start_key()
self.clear_indexvalue()
self.clear_key()
self.clear_start_inclusive()
self.clear_before_ascending()
def OutputUnchecked(self, out):
if (self.has_start_key_):
out.putVarInt32(218)
out.putPrefixedString(self.start_key_)
if (self.has_start_inclusive_):
out.putVarInt32(224)
out.putBoolean(self.start_inclusive_)
for i in xrange(len(self.indexvalue_)):
out.putVarInt32(235)
self.indexvalue_[i].OutputUnchecked(out)
out.putVarInt32(236)
if (self.has_key_):
out.putVarInt32(258)
out.putVarInt32(self.key_.ByteSize())
self.key_.OutputUnchecked(out)
if (self.has_before_ascending_):
out.putVarInt32(264)
out.putBoolean(self.before_ascending_)
def OutputPartial(self, out):
if (self.has_start_key_):
out.putVarInt32(218)
out.putPrefixedString(self.start_key_)
if (self.has_start_inclusive_):
out.putVarInt32(224)
out.putBoolean(self.start_inclusive_)
for i in xrange(len(self.indexvalue_)):
out.putVarInt32(235)
self.indexvalue_[i].OutputPartial(out)
out.putVarInt32(236)
if (self.has_key_):
out.putVarInt32(258)
out.putVarInt32(self.key_.ByteSizePartial())
self.key_.OutputPartial(out)
if (self.has_before_ascending_):
out.putVarInt32(264)
out.putBoolean(self.before_ascending_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 20: break
if tt == 218:
self.set_start_key(d.getPrefixedString())
continue
if tt == 224:
self.set_start_inclusive(d.getBoolean())
continue
if tt == 235:
self.add_indexvalue().TryMerge(d)
continue
if tt == 258:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_key().TryMerge(tmp)
continue
if tt == 264:
self.set_before_ascending(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
cnt=0
for e in self.indexvalue_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("IndexValue%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_key_:
res+=prefix+"key <\n"
res+=self.key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
if self.has_before_ascending_: res+=prefix+("before_ascending: %s\n" % self.DebugFormatBool(self.before_ascending_))
return res
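# Cursor over compiled query results; wraps an optional Position group, an
# optional postfix_position (IndexPostfix) and an optional absolute_position
# (IndexPosition).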
class CompiledCursor(ProtocolBuffer.ProtocolMessage):
has_position_ = 0
position_ = None
has_postfix_position_ = 0
postfix_position_ = None
has_absolute_position_ = 0
absolute_position_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def position(self):
if self.position_ is None:
self.lazy_init_lock_.acquire()
try:
if self.position_ is None: self.position_ = CompiledCursor_Position()
finally:
self.lazy_init_lock_.release()
return self.position_
def mutable_position(self): self.has_position_ = 1; return self.position()
def clear_position(self):
if self.has_position_:
self.has_position_ = 0;
if self.position_ is not None: self.position_.Clear()
def has_position(self): return self.has_position_
def postfix_position(self):
if self.postfix_position_ is None:
self.lazy_init_lock_.acquire()
try:
if self.postfix_position_ is None: self.postfix_position_ = IndexPostfix()
finally:
self.lazy_init_lock_.release()
return self.postfix_position_
def mutable_postfix_position(self): self.has_postfix_position_ = 1; return self.postfix_position()
def clear_postfix_position(self):
if self.has_postfix_position_:
self.has_postfix_position_ = 0;
if self.postfix_position_ is not None: self.postfix_position_.Clear()
def has_postfix_position(self): return self.has_postfix_position_
def absolute_position(self):
if self.absolute_position_ is None:
self.lazy_init_lock_.acquire()
try:
if self.absolute_position_ is None: self.absolute_position_ = IndexPosition()
finally:
self.lazy_init_lock_.release()
return self.absolute_position_
def mutable_absolute_position(self): self.has_absolute_position_ = 1; return self.absolute_position()
def clear_absolute_position(self):
if self.has_absolute_position_:
self.has_absolute_position_ = 0;
if self.absolute_position_ is not None: self.absolute_position_.Clear()
def has_absolute_position(self): return self.has_absolute_position_
def MergeFrom(self, x):
assert x is not self
if (x.has_position()): self.mutable_position().MergeFrom(x.position())
if (x.has_postfix_position()): self.mutable_postfix_position().MergeFrom(x.postfix_position())
if (x.has_absolute_position()): self.mutable_absolute_position().MergeFrom(x.absolute_position())
def Equals(self, x):
if x is self: return 1
if self.has_position_ != x.has_position_: return 0
if self.has_position_ and self.position_ != x.position_: return 0
if self.has_postfix_position_ != x.has_postfix_position_: return 0
if self.has_postfix_position_ and self.postfix_position_ != x.postfix_position_: return 0
if self.has_absolute_position_ != x.has_absolute_position_: return 0
if self.has_absolute_position_ and self.absolute_position_ != x.absolute_position_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_position_ and not self.position_.IsInitialized(debug_strs)): initialized = 0
if (self.has_postfix_position_ and not self.postfix_position_.IsInitialized(debug_strs)): initialized = 0
if (self.has_absolute_position_ and not self.absolute_position_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_position_): n += 2 + self.position_.ByteSize()
if (self.has_postfix_position_): n += 1 + self.lengthString(self.postfix_position_.ByteSize())
if (self.has_absolute_position_): n += 1 + self.lengthString(self.absolute_position_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_position_): n += 2 + self.position_.ByteSizePartial()
if (self.has_postfix_position_): n += 1 + self.lengthString(self.postfix_position_.ByteSizePartial())
if (self.has_absolute_position_): n += 1 + self.lengthString(self.absolute_position_.ByteSizePartial())
return n
def Clear(self):
self.clear_position()
self.clear_postfix_position()
self.clear_absolute_position()
def OutputUnchecked(self, out):
if (self.has_postfix_position_):
out.putVarInt32(10)
out.putVarInt32(self.postfix_position_.ByteSize())
self.postfix_position_.OutputUnchecked(out)
if (self.has_position_):
out.putVarInt32(19)
self.position_.OutputUnchecked(out)
out.putVarInt32(20)
if (self.has_absolute_position_):
out.putVarInt32(26)
out.putVarInt32(self.absolute_position_.ByteSize())
self.absolute_position_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_postfix_position_):
out.putVarInt32(10)
out.putVarInt32(self.postfix_position_.ByteSizePartial())
self.postfix_position_.OutputPartial(out)
if (self.has_position_):
out.putVarInt32(19)
self.position_.OutputPartial(out)
out.putVarInt32(20)
if (self.has_absolute_position_):
out.putVarInt32(26)
out.putVarInt32(self.absolute_position_.ByteSizePartial())
self.absolute_position_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_postfix_position().TryMerge(tmp)
continue
if tt == 19:
self.mutable_position().TryMerge(d)
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_absolute_position().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_position_:
res+=prefix+"Position {\n"
res+=self.position_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_postfix_position_:
res+=prefix+"postfix_position <\n"
res+=self.postfix_position_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_absolute_position_:
res+=prefix+"absolute_position <\n"
res+=self.absolute_position_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kPositionGroup = 2
kPositionstart_key = 27
kPositionIndexValueGroup = 29
kPositionIndexValueproperty = 30
kPositionIndexValuevalue = 31
kPositionkey = 32
kPositionstart_inclusive = 28
kPositionbefore_ascending = 33
kpostfix_position = 1
kabsolute_position = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "postfix_position",
2: "Position",
3: "absolute_position",
27: "start_key",
28: "start_inclusive",
29: "IndexValue",
30: "property",
31: "value",
32: "key",
33: "before_ascending",
}, 33)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STARTGROUP,
3: ProtocolBuffer.Encoder.STRING,
27: ProtocolBuffer.Encoder.STRING,
28: ProtocolBuffer.Encoder.NUMERIC,
29: ProtocolBuffer.Encoder.STARTGROUP,
30: ProtocolBuffer.Encoder.STRING,
31: ProtocolBuffer.Encoder.STRING,
32: ProtocolBuffer.Encoder.STRING,
33: ProtocolBuffer.Encoder.NUMERIC,
}, 33, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledCursor'
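# Query cursor handle: a required fixed64 cursor id plus an optional app id.
# Minimal round-trip sketch (assumes the Encode()/MergeFromString() helpers
# inherited from ProtocolBuffer.ProtocolMessage; the app id is hypothetical):
#
#   c = Cursor()
#   c.set_cursor(12345)
#   c.set_app('example-app')
#   data = c.Encode()
#   assert Cursor(data).cursor() == 12345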
class Cursor(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
cursor_ = 0
has_app_ = 0
app_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def cursor(self): return self.cursor_
def set_cursor(self, x):
self.has_cursor_ = 1
self.cursor_ = x
def clear_cursor(self):
if self.has_cursor_:
self.has_cursor_ = 0
self.cursor_ = 0
def has_cursor(self): return self.has_cursor_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def MergeFrom(self, x):
assert x is not self
if (x.has_cursor()): self.set_cursor(x.cursor())
if (x.has_app()): self.set_app(x.app())
def Equals(self, x):
if x is self: return 1
if self.has_cursor_ != x.has_cursor_: return 0
if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_cursor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: cursor not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
return n + 9
def ByteSizePartial(self):
n = 0
if (self.has_cursor_):
n += 9
if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
return n
def Clear(self):
self.clear_cursor()
self.clear_app()
def OutputUnchecked(self, out):
out.putVarInt32(9)
out.put64(self.cursor_)
if (self.has_app_):
out.putVarInt32(18)
out.putPrefixedString(self.app_)
def OutputPartial(self, out):
if (self.has_cursor_):
out.putVarInt32(9)
out.put64(self.cursor_)
if (self.has_app_):
out.putVarInt32(18)
out.putPrefixedString(self.app_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 9:
self.set_cursor(d.get64())
continue
if tt == 18:
self.set_app(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcursor = 1
kapp = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cursor",
2: "app",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.DOUBLE,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cursor'
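# Error codes for datastore_v3 RPCs; the message itself carries no fields,
# only the ErrorCode enum constants and their names.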
class Error(ProtocolBuffer.ProtocolMessage):
BAD_REQUEST = 1
CONCURRENT_TRANSACTION = 2
INTERNAL_ERROR = 3
NEED_INDEX = 4
TIMEOUT = 5
PERMISSION_DENIED = 6
BIGTABLE_ERROR = 7
COMMITTED_BUT_STILL_APPLYING = 8
CAPABILITY_DISABLED = 9
TRY_ALTERNATE_BACKEND = 10
SAFE_TIME_TOO_OLD = 11
_ErrorCode_NAMES = {
1: "BAD_REQUEST",
2: "CONCURRENT_TRANSACTION",
3: "INTERNAL_ERROR",
4: "NEED_INDEX",
5: "TIMEOUT",
6: "PERMISSION_DENIED",
7: "BIGTABLE_ERROR",
8: "COMMITTED_BUT_STILL_APPLYING",
9: "CAPABILITY_DISABLED",
10: "TRY_ALTERNATE_BACKEND",
11: "SAFE_TIME_TOO_OLD",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
ErrorCode_Name = classmethod(ErrorCode_Name)
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Error'
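# Commit-phase cost breakdown: counts of requested entity puts and deletes.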
class Cost_CommitCost(ProtocolBuffer.ProtocolMessage):
has_requested_entity_puts_ = 0
requested_entity_puts_ = 0
has_requested_entity_deletes_ = 0
requested_entity_deletes_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def requested_entity_puts(self): return self.requested_entity_puts_
def set_requested_entity_puts(self, x):
self.has_requested_entity_puts_ = 1
self.requested_entity_puts_ = x
def clear_requested_entity_puts(self):
if self.has_requested_entity_puts_:
self.has_requested_entity_puts_ = 0
self.requested_entity_puts_ = 0
def has_requested_entity_puts(self): return self.has_requested_entity_puts_
def requested_entity_deletes(self): return self.requested_entity_deletes_
def set_requested_entity_deletes(self, x):
self.has_requested_entity_deletes_ = 1
self.requested_entity_deletes_ = x
def clear_requested_entity_deletes(self):
if self.has_requested_entity_deletes_:
self.has_requested_entity_deletes_ = 0
self.requested_entity_deletes_ = 0
def has_requested_entity_deletes(self): return self.has_requested_entity_deletes_
def MergeFrom(self, x):
assert x is not self
if (x.has_requested_entity_puts()): self.set_requested_entity_puts(x.requested_entity_puts())
if (x.has_requested_entity_deletes()): self.set_requested_entity_deletes(x.requested_entity_deletes())
def Equals(self, x):
if x is self: return 1
if self.has_requested_entity_puts_ != x.has_requested_entity_puts_: return 0
if self.has_requested_entity_puts_ and self.requested_entity_puts_ != x.requested_entity_puts_: return 0
if self.has_requested_entity_deletes_ != x.has_requested_entity_deletes_: return 0
if self.has_requested_entity_deletes_ and self.requested_entity_deletes_ != x.requested_entity_deletes_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
return n
def Clear(self):
self.clear_requested_entity_puts()
self.clear_requested_entity_deletes()
def OutputUnchecked(self, out):
if (self.has_requested_entity_puts_):
out.putVarInt32(48)
out.putVarInt32(self.requested_entity_puts_)
if (self.has_requested_entity_deletes_):
out.putVarInt32(56)
out.putVarInt32(self.requested_entity_deletes_)
def OutputPartial(self, out):
if (self.has_requested_entity_puts_):
out.putVarInt32(48)
out.putVarInt32(self.requested_entity_puts_)
if (self.has_requested_entity_deletes_):
out.putVarInt32(56)
out.putVarInt32(self.requested_entity_deletes_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 44: break
if tt == 48:
self.set_requested_entity_puts(d.getVarInt32())
continue
if tt == 56:
self.set_requested_entity_deletes(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_requested_entity_puts_: res+=prefix+("requested_entity_puts: %s\n" % self.DebugFormatInt32(self.requested_entity_puts_))
if self.has_requested_entity_deletes_: res+=prefix+("requested_entity_deletes: %s\n" % self.DebugFormatInt32(self.requested_entity_deletes_))
return res
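# Cost accounting for a datastore operation: index/entity write counts and
# byte counts, an optional CommitCost group, approximate_storage_delta and
# id_sequence_updates.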
class Cost(ProtocolBuffer.ProtocolMessage):
has_index_writes_ = 0
index_writes_ = 0
has_index_write_bytes_ = 0
index_write_bytes_ = 0
has_entity_writes_ = 0
entity_writes_ = 0
has_entity_write_bytes_ = 0
entity_write_bytes_ = 0
has_commitcost_ = 0
commitcost_ = None
has_approximate_storage_delta_ = 0
approximate_storage_delta_ = 0
has_id_sequence_updates_ = 0
id_sequence_updates_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def index_writes(self): return self.index_writes_
def set_index_writes(self, x):
self.has_index_writes_ = 1
self.index_writes_ = x
def clear_index_writes(self):
if self.has_index_writes_:
self.has_index_writes_ = 0
self.index_writes_ = 0
def has_index_writes(self): return self.has_index_writes_
def index_write_bytes(self): return self.index_write_bytes_
def set_index_write_bytes(self, x):
self.has_index_write_bytes_ = 1
self.index_write_bytes_ = x
def clear_index_write_bytes(self):
if self.has_index_write_bytes_:
self.has_index_write_bytes_ = 0
self.index_write_bytes_ = 0
def has_index_write_bytes(self): return self.has_index_write_bytes_
def entity_writes(self): return self.entity_writes_
def set_entity_writes(self, x):
self.has_entity_writes_ = 1
self.entity_writes_ = x
def clear_entity_writes(self):
if self.has_entity_writes_:
self.has_entity_writes_ = 0
self.entity_writes_ = 0
def has_entity_writes(self): return self.has_entity_writes_
def entity_write_bytes(self): return self.entity_write_bytes_
def set_entity_write_bytes(self, x):
self.has_entity_write_bytes_ = 1
self.entity_write_bytes_ = x
def clear_entity_write_bytes(self):
if self.has_entity_write_bytes_:
self.has_entity_write_bytes_ = 0
self.entity_write_bytes_ = 0
def has_entity_write_bytes(self): return self.has_entity_write_bytes_
def commitcost(self):
if self.commitcost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.commitcost_ is None: self.commitcost_ = Cost_CommitCost()
finally:
self.lazy_init_lock_.release()
return self.commitcost_
def mutable_commitcost(self): self.has_commitcost_ = 1; return self.commitcost()
def clear_commitcost(self):
if self.has_commitcost_:
self.has_commitcost_ = 0;
if self.commitcost_ is not None: self.commitcost_.Clear()
def has_commitcost(self): return self.has_commitcost_
def approximate_storage_delta(self): return self.approximate_storage_delta_
def set_approximate_storage_delta(self, x):
self.has_approximate_storage_delta_ = 1
self.approximate_storage_delta_ = x
def clear_approximate_storage_delta(self):
if self.has_approximate_storage_delta_:
self.has_approximate_storage_delta_ = 0
self.approximate_storage_delta_ = 0
def has_approximate_storage_delta(self): return self.has_approximate_storage_delta_
def id_sequence_updates(self): return self.id_sequence_updates_
def set_id_sequence_updates(self, x):
self.has_id_sequence_updates_ = 1
self.id_sequence_updates_ = x
def clear_id_sequence_updates(self):
if self.has_id_sequence_updates_:
self.has_id_sequence_updates_ = 0
self.id_sequence_updates_ = 0
def has_id_sequence_updates(self): return self.has_id_sequence_updates_
def MergeFrom(self, x):
assert x is not self
if (x.has_index_writes()): self.set_index_writes(x.index_writes())
if (x.has_index_write_bytes()): self.set_index_write_bytes(x.index_write_bytes())
if (x.has_entity_writes()): self.set_entity_writes(x.entity_writes())
if (x.has_entity_write_bytes()): self.set_entity_write_bytes(x.entity_write_bytes())
if (x.has_commitcost()): self.mutable_commitcost().MergeFrom(x.commitcost())
if (x.has_approximate_storage_delta()): self.set_approximate_storage_delta(x.approximate_storage_delta())
if (x.has_id_sequence_updates()): self.set_id_sequence_updates(x.id_sequence_updates())
def Equals(self, x):
if x is self: return 1
if self.has_index_writes_ != x.has_index_writes_: return 0
if self.has_index_writes_ and self.index_writes_ != x.index_writes_: return 0
if self.has_index_write_bytes_ != x.has_index_write_bytes_: return 0
if self.has_index_write_bytes_ and self.index_write_bytes_ != x.index_write_bytes_: return 0
if self.has_entity_writes_ != x.has_entity_writes_: return 0
if self.has_entity_writes_ and self.entity_writes_ != x.entity_writes_: return 0
if self.has_entity_write_bytes_ != x.has_entity_write_bytes_: return 0
if self.has_entity_write_bytes_ and self.entity_write_bytes_ != x.entity_write_bytes_: return 0
if self.has_commitcost_ != x.has_commitcost_: return 0
if self.has_commitcost_ and self.commitcost_ != x.commitcost_: return 0
if self.has_approximate_storage_delta_ != x.has_approximate_storage_delta_: return 0
if self.has_approximate_storage_delta_ and self.approximate_storage_delta_ != x.approximate_storage_delta_: return 0
if self.has_id_sequence_updates_ != x.has_id_sequence_updates_: return 0
if self.has_id_sequence_updates_ and self.id_sequence_updates_ != x.id_sequence_updates_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_commitcost_ and not self.commitcost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSize()
if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSizePartial()
if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
return n
def Clear(self):
self.clear_index_writes()
self.clear_index_write_bytes()
self.clear_entity_writes()
self.clear_entity_write_bytes()
self.clear_commitcost()
self.clear_approximate_storage_delta()
self.clear_id_sequence_updates()
def OutputUnchecked(self, out):
if (self.has_index_writes_):
out.putVarInt32(8)
out.putVarInt32(self.index_writes_)
if (self.has_index_write_bytes_):
out.putVarInt32(16)
out.putVarInt32(self.index_write_bytes_)
if (self.has_entity_writes_):
out.putVarInt32(24)
out.putVarInt32(self.entity_writes_)
if (self.has_entity_write_bytes_):
out.putVarInt32(32)
out.putVarInt32(self.entity_write_bytes_)
if (self.has_commitcost_):
out.putVarInt32(43)
self.commitcost_.OutputUnchecked(out)
out.putVarInt32(44)
if (self.has_approximate_storage_delta_):
out.putVarInt32(64)
out.putVarInt32(self.approximate_storage_delta_)
if (self.has_id_sequence_updates_):
out.putVarInt32(72)
out.putVarInt32(self.id_sequence_updates_)
def OutputPartial(self, out):
if (self.has_index_writes_):
out.putVarInt32(8)
out.putVarInt32(self.index_writes_)
if (self.has_index_write_bytes_):
out.putVarInt32(16)
out.putVarInt32(self.index_write_bytes_)
if (self.has_entity_writes_):
out.putVarInt32(24)
out.putVarInt32(self.entity_writes_)
if (self.has_entity_write_bytes_):
out.putVarInt32(32)
out.putVarInt32(self.entity_write_bytes_)
if (self.has_commitcost_):
out.putVarInt32(43)
self.commitcost_.OutputPartial(out)
out.putVarInt32(44)
if (self.has_approximate_storage_delta_):
out.putVarInt32(64)
out.putVarInt32(self.approximate_storage_delta_)
if (self.has_id_sequence_updates_):
out.putVarInt32(72)
out.putVarInt32(self.id_sequence_updates_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_index_writes(d.getVarInt32())
continue
if tt == 16:
self.set_index_write_bytes(d.getVarInt32())
continue
if tt == 24:
self.set_entity_writes(d.getVarInt32())
continue
if tt == 32:
self.set_entity_write_bytes(d.getVarInt32())
continue
if tt == 43:
self.mutable_commitcost().TryMerge(d)
continue
if tt == 64:
self.set_approximate_storage_delta(d.getVarInt32())
continue
if tt == 72:
self.set_id_sequence_updates(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_index_writes_: res+=prefix+("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_))
if self.has_index_write_bytes_: res+=prefix+("index_write_bytes: %s\n" % self.DebugFormatInt32(self.index_write_bytes_))
if self.has_entity_writes_: res+=prefix+("entity_writes: %s\n" % self.DebugFormatInt32(self.entity_writes_))
if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_))
if self.has_commitcost_:
res+=prefix+"CommitCost {\n"
res+=self.commitcost_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_approximate_storage_delta_: res+=prefix+("approximate_storage_delta: %s\n" % self.DebugFormatInt32(self.approximate_storage_delta_))
if self.has_id_sequence_updates_: res+=prefix+("id_sequence_updates: %s\n" % self.DebugFormatInt32(self.id_sequence_updates_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kindex_writes = 1
kindex_write_bytes = 2
kentity_writes = 3
kentity_write_bytes = 4
kCommitCostGroup = 5
kCommitCostrequested_entity_puts = 6
kCommitCostrequested_entity_deletes = 7
kapproximate_storage_delta = 8
kid_sequence_updates = 9
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "index_writes",
2: "index_write_bytes",
3: "entity_writes",
4: "entity_write_bytes",
5: "CommitCost",
6: "requested_entity_puts",
7: "requested_entity_deletes",
8: "approximate_storage_delta",
9: "id_sequence_updates",
}, 9)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STARTGROUP,
6: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.NUMERIC,
}, 9, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cost'
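# Batch get request: repeated entity keys, an optional header and transaction,
# plus failover_ms, strong and allow_deferred options. Sketch of building a
# request (the Reference returned by add_key() still needs to be filled in):
#
#   req = GetRequest()
#   key = req.add_key()
#   req.set_strong(1)
#   req.set_allow_deferred(1)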
class GetRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_transaction_ = 0
transaction_ = None
has_failover_ms_ = 0
failover_ms_ = 0
has_strong_ = 0
strong_ = 0
has_allow_deferred_ = 0
allow_deferred_ = 0
def __init__(self, contents=None):
self.key_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def failover_ms(self): return self.failover_ms_
def set_failover_ms(self, x):
self.has_failover_ms_ = 1
self.failover_ms_ = x
def clear_failover_ms(self):
if self.has_failover_ms_:
self.has_failover_ms_ = 0
self.failover_ms_ = 0
def has_failover_ms(self): return self.has_failover_ms_
def strong(self): return self.strong_
def set_strong(self, x):
self.has_strong_ = 1
self.strong_ = x
def clear_strong(self):
if self.has_strong_:
self.has_strong_ = 0
self.strong_ = 0
def has_strong(self): return self.has_strong_
def allow_deferred(self): return self.allow_deferred_
def set_allow_deferred(self, x):
self.has_allow_deferred_ = 1
self.allow_deferred_ = x
def clear_allow_deferred(self):
if self.has_allow_deferred_:
self.has_allow_deferred_ = 0
self.allow_deferred_ = 0
def has_allow_deferred(self): return self.has_allow_deferred_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
if (x.has_strong()): self.set_strong(x.strong())
if (x.has_allow_deferred()): self.set_allow_deferred(x.allow_deferred())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if self.has_failover_ms_ != x.has_failover_ms_: return 0
if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
if self.has_strong_ != x.has_strong_: return 0
if self.has_strong_ and self.strong_ != x.strong_: return 0
if self.has_allow_deferred_ != x.has_allow_deferred_: return 0
if self.has_allow_deferred_ and self.allow_deferred_ != x.allow_deferred_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 2
if (self.has_allow_deferred_): n += 2
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 2
if (self.has_allow_deferred_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_key()
self.clear_transaction()
self.clear_failover_ms()
self.clear_strong()
self.clear_allow_deferred()
def OutputUnchecked(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
if (self.has_failover_ms_):
out.putVarInt32(24)
out.putVarInt64(self.failover_ms_)
if (self.has_strong_):
out.putVarInt32(32)
out.putBoolean(self.strong_)
if (self.has_allow_deferred_):
out.putVarInt32(40)
out.putBoolean(self.allow_deferred_)
if (self.has_header_):
out.putVarInt32(50)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
if (self.has_failover_ms_):
out.putVarInt32(24)
out.putVarInt64(self.failover_ms_)
if (self.has_strong_):
out.putVarInt32(32)
out.putBoolean(self.strong_)
if (self.has_allow_deferred_):
out.putVarInt32(40)
out.putBoolean(self.allow_deferred_)
if (self.has_header_):
out.putVarInt32(50)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 24:
self.set_failover_ms(d.getVarInt64())
continue
if tt == 32:
self.set_strong(d.getBoolean())
continue
if tt == 40:
self.set_allow_deferred(d.getBoolean())
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.key_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("key%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
if self.has_allow_deferred_: res+=prefix+("allow_deferred: %s\n" % self.DebugFormatBool(self.allow_deferred_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 6
kkey = 1
ktransaction = 2
kfailover_ms = 3
kstrong = 4
kallow_deferred = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "transaction",
3: "failover_ms",
4: "strong",
5: "allow_deferred",
6: "header",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.STRING,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetRequest'
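# Single result entry in a GetResponse group: an optional entity, an optional
# key and an optional version number.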
class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
has_entity_ = 0
entity_ = None
has_key_ = 0
key_ = None
has_version_ = 0
version_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def entity(self):
if self.entity_ is None:
self.lazy_init_lock_.acquire()
try:
if self.entity_ is None: self.entity_ = EntityProto()
finally:
self.lazy_init_lock_.release()
return self.entity_
def mutable_entity(self): self.has_entity_ = 1; return self.entity()
def clear_entity(self):
if self.has_entity_:
self.has_entity_ = 0;
if self.entity_ is not None: self.entity_.Clear()
def has_entity(self): return self.has_entity_
def key(self):
if self.key_ is None:
self.lazy_init_lock_.acquire()
try:
if self.key_ is None: self.key_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.key_
def mutable_key(self): self.has_key_ = 1; return self.key()
def clear_key(self):
if self.has_key_:
self.has_key_ = 0;
if self.key_ is not None: self.key_.Clear()
def has_key(self): return self.has_key_
def version(self): return self.version_
def set_version(self, x):
self.has_version_ = 1
self.version_ = x
def clear_version(self):
if self.has_version_:
self.has_version_ = 0
self.version_ = 0
def has_version(self): return self.has_version_
def MergeFrom(self, x):
assert x is not self
if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())
if (x.has_key()): self.mutable_key().MergeFrom(x.key())
if (x.has_version()): self.set_version(x.version())
def Equals(self, x):
if x is self: return 1
if self.has_entity_ != x.has_entity_: return 0
if self.has_entity_ and self.entity_ != x.entity_: return 0
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_version_ != x.has_version_: return 0
if self.has_version_ and self.version_ != x.version_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSizePartial())
if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
return n
def Clear(self):
self.clear_entity()
self.clear_key()
self.clear_version()
def OutputUnchecked(self, out):
if (self.has_entity_):
out.putVarInt32(18)
out.putVarInt32(self.entity_.ByteSize())
self.entity_.OutputUnchecked(out)
if (self.has_version_):
out.putVarInt32(24)
out.putVarInt64(self.version_)
if (self.has_key_):
out.putVarInt32(34)
out.putVarInt32(self.key_.ByteSize())
self.key_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_entity_):
out.putVarInt32(18)
out.putVarInt32(self.entity_.ByteSizePartial())
self.entity_.OutputPartial(out)
if (self.has_version_):
out.putVarInt32(24)
out.putVarInt64(self.version_)
if (self.has_key_):
out.putVarInt32(34)
out.putVarInt32(self.key_.ByteSizePartial())
self.key_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_entity().TryMerge(tmp)
continue
if tt == 24:
self.set_version(d.getVarInt64())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_key().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_entity_:
res+=prefix+"entity <\n"
res+=self.entity_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_key_:
res+=prefix+"key <\n"
res+=self.key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
return res
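# GetResponse: reply message for the datastore v3 Get call
# ('apphosting_datastore_v3.GetResponse').  Holds a repeated Entity group,
# a repeated list of deferred Reference keys, and an in_order flag whose
# default is true (in_order_ = 1).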
class GetResponse(ProtocolBuffer.ProtocolMessage):
has_in_order_ = 0
in_order_ = 1
def __init__(self, contents=None):
self.entity_ = []
self.deferred_ = []
if contents is not None: self.MergeFromString(contents)
def entity_size(self): return len(self.entity_)
def entity_list(self): return self.entity_
def entity(self, i):
return self.entity_[i]
def mutable_entity(self, i):
return self.entity_[i]
def add_entity(self):
x = GetResponse_Entity()
self.entity_.append(x)
return x
def clear_entity(self):
self.entity_ = []
def deferred_size(self): return len(self.deferred_)
def deferred_list(self): return self.deferred_
def deferred(self, i):
return self.deferred_[i]
def mutable_deferred(self, i):
return self.deferred_[i]
def add_deferred(self):
x = Reference()
self.deferred_.append(x)
return x
def clear_deferred(self):
self.deferred_ = []
def in_order(self): return self.in_order_
def set_in_order(self, x):
self.has_in_order_ = 1
self.in_order_ = x
def clear_in_order(self):
if self.has_in_order_:
self.has_in_order_ = 0
self.in_order_ = 1
def has_in_order(self): return self.has_in_order_
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
for i in xrange(x.deferred_size()): self.add_deferred().CopyFrom(x.deferred(i))
if (x.has_in_order()): self.set_in_order(x.in_order())
def Equals(self, x):
if x is self: return 1
if len(self.entity_) != len(x.entity_): return 0
for e1, e2 in zip(self.entity_, x.entity_):
if e1 != e2: return 0
if len(self.deferred_) != len(x.deferred_): return 0
for e1, e2 in zip(self.deferred_, x.deferred_):
if e1 != e2: return 0
if self.has_in_order_ != x.has_in_order_: return 0
if self.has_in_order_ and self.in_order_ != x.in_order_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.entity_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.deferred_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.entity_)
for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
n += 1 * len(self.deferred_)
for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSize())
if (self.has_in_order_): n += 2
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.entity_)
for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSizePartial()
n += 1 * len(self.deferred_)
for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSizePartial())
if (self.has_in_order_): n += 2
return n
def Clear(self):
self.clear_entity()
self.clear_deferred()
self.clear_in_order()
def OutputUnchecked(self, out):
for i in xrange(len(self.entity_)):
out.putVarInt32(11)
self.entity_[i].OutputUnchecked(out)
out.putVarInt32(12)
for i in xrange(len(self.deferred_)):
out.putVarInt32(42)
out.putVarInt32(self.deferred_[i].ByteSize())
self.deferred_[i].OutputUnchecked(out)
if (self.has_in_order_):
out.putVarInt32(48)
out.putBoolean(self.in_order_)
def OutputPartial(self, out):
for i in xrange(len(self.entity_)):
out.putVarInt32(11)
self.entity_[i].OutputPartial(out)
out.putVarInt32(12)
for i in xrange(len(self.deferred_)):
out.putVarInt32(42)
out.putVarInt32(self.deferred_[i].ByteSizePartial())
self.deferred_[i].OutputPartial(out)
if (self.has_in_order_):
out.putVarInt32(48)
out.putBoolean(self.in_order_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_entity().TryMerge(d)
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_deferred().TryMerge(tmp)
continue
if tt == 48:
self.set_in_order(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.entity_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Entity%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
cnt=0
for e in self.deferred_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("deferred%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_in_order_: res+=prefix+("in_order: %s\n" % self.DebugFormatBool(self.in_order_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kEntityGroup = 1
kEntityentity = 2
kEntitykey = 4
kEntityversion = 3
kdeferred = 5
kin_order = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "Entity",
2: "entity",
3: "version",
4: "key",
5: "deferred",
6: "in_order",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetResponse'
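# PutRequest: request message for the datastore v3 Put call.  Besides the
# repeated entity list it carries an optional header and transaction,
# repeated composite_index and snapshot messages, trusted/force/mark_changes
# flags, and an auto_id_policy enum (CURRENT or SEQUENTIAL).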
class PutRequest(ProtocolBuffer.ProtocolMessage):
CURRENT = 0
SEQUENTIAL = 1
_AutoIdPolicy_NAMES = {
0: "CURRENT",
1: "SEQUENTIAL",
}
def AutoIdPolicy_Name(cls, x): return cls._AutoIdPolicy_NAMES.get(x, "")
AutoIdPolicy_Name = classmethod(AutoIdPolicy_Name)
has_header_ = 0
header_ = None
has_transaction_ = 0
transaction_ = None
has_trusted_ = 0
trusted_ = 0
has_force_ = 0
force_ = 0
has_mark_changes_ = 0
mark_changes_ = 0
has_auto_id_policy_ = 0
auto_id_policy_ = 0
def __init__(self, contents=None):
self.entity_ = []
self.composite_index_ = []
self.snapshot_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def entity_size(self): return len(self.entity_)
def entity_list(self): return self.entity_
def entity(self, i):
return self.entity_[i]
def mutable_entity(self, i):
return self.entity_[i]
def add_entity(self):
x = EntityProto()
self.entity_.append(x)
return x
def clear_entity(self):
self.entity_ = []
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def trusted(self): return self.trusted_
def set_trusted(self, x):
self.has_trusted_ = 1
self.trusted_ = x
def clear_trusted(self):
if self.has_trusted_:
self.has_trusted_ = 0
self.trusted_ = 0
def has_trusted(self): return self.has_trusted_
def force(self): return self.force_
def set_force(self, x):
self.has_force_ = 1
self.force_ = x
def clear_force(self):
if self.has_force_:
self.has_force_ = 0
self.force_ = 0
def has_force(self): return self.has_force_
def mark_changes(self): return self.mark_changes_
def set_mark_changes(self, x):
self.has_mark_changes_ = 1
self.mark_changes_ = x
def clear_mark_changes(self):
if self.has_mark_changes_:
self.has_mark_changes_ = 0
self.mark_changes_ = 0
def has_mark_changes(self): return self.has_mark_changes_
def snapshot_size(self): return len(self.snapshot_)
def snapshot_list(self): return self.snapshot_
def snapshot(self, i):
return self.snapshot_[i]
def mutable_snapshot(self, i):
return self.snapshot_[i]
def add_snapshot(self):
x = Snapshot()
self.snapshot_.append(x)
return x
def clear_snapshot(self):
self.snapshot_ = []
def auto_id_policy(self): return self.auto_id_policy_
def set_auto_id_policy(self, x):
self.has_auto_id_policy_ = 1
self.auto_id_policy_ = x
def clear_auto_id_policy(self):
if self.has_auto_id_policy_:
self.has_auto_id_policy_ = 0
self.auto_id_policy_ = 0
def has_auto_id_policy(self): return self.has_auto_id_policy_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_trusted()): self.set_trusted(x.trusted())
if (x.has_force()): self.set_force(x.force())
if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
if (x.has_auto_id_policy()): self.set_auto_id_policy(x.auto_id_policy())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.entity_) != len(x.entity_): return 0
for e1, e2 in zip(self.entity_, x.entity_):
if e1 != e2: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_trusted_ != x.has_trusted_: return 0
if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
if self.has_force_ != x.has_force_: return 0
if self.has_force_ and self.force_ != x.force_: return 0
if self.has_mark_changes_ != x.has_mark_changes_: return 0
if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
if len(self.snapshot_) != len(x.snapshot_): return 0
for e1, e2 in zip(self.snapshot_, x.snapshot_):
if e1 != e2: return 0
if self.has_auto_id_policy_ != x.has_auto_id_policy_: return 0
if self.has_auto_id_policy_ and self.auto_id_policy_ != x.auto_id_policy_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.entity_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.snapshot_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.entity_)
for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_trusted_): n += 2
if (self.has_force_): n += 2
if (self.has_mark_changes_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.entity_)
for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSizePartial())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_trusted_): n += 2
if (self.has_force_): n += 2
if (self.has_mark_changes_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
return n
def Clear(self):
self.clear_header()
self.clear_entity()
self.clear_transaction()
self.clear_composite_index()
self.clear_trusted()
self.clear_force()
self.clear_mark_changes()
self.clear_snapshot()
self.clear_auto_id_policy()
def OutputUnchecked(self, out):
for i in xrange(len(self.entity_)):
out.putVarInt32(10)
out.putVarInt32(self.entity_[i].ByteSize())
self.entity_[i].OutputUnchecked(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(26)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
if (self.has_trusted_):
out.putVarInt32(32)
out.putBoolean(self.trusted_)
if (self.has_force_):
out.putVarInt32(56)
out.putBoolean(self.force_)
if (self.has_mark_changes_):
out.putVarInt32(64)
out.putBoolean(self.mark_changes_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSize())
self.snapshot_[i].OutputUnchecked(out)
if (self.has_auto_id_policy_):
out.putVarInt32(80)
out.putVarInt32(self.auto_id_policy_)
if (self.has_header_):
out.putVarInt32(90)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.entity_)):
out.putVarInt32(10)
out.putVarInt32(self.entity_[i].ByteSizePartial())
self.entity_[i].OutputPartial(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(26)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
if (self.has_trusted_):
out.putVarInt32(32)
out.putBoolean(self.trusted_)
if (self.has_force_):
out.putVarInt32(56)
out.putBoolean(self.force_)
if (self.has_mark_changes_):
out.putVarInt32(64)
out.putBoolean(self.mark_changes_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSizePartial())
self.snapshot_[i].OutputPartial(out)
if (self.has_auto_id_policy_):
out.putVarInt32(80)
out.putVarInt32(self.auto_id_policy_)
if (self.has_header_):
out.putVarInt32(90)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_entity().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_composite_index().TryMerge(tmp)
continue
if tt == 32:
self.set_trusted(d.getBoolean())
continue
if tt == 56:
self.set_force(d.getBoolean())
continue
if tt == 64:
self.set_mark_changes(d.getBoolean())
continue
if tt == 74:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_snapshot().TryMerge(tmp)
continue
if tt == 80:
self.set_auto_id_policy(d.getVarInt32())
continue
if tt == 90:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.entity_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("entity%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.composite_index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("composite_index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
cnt=0
for e in self.snapshot_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("snapshot%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_auto_id_policy_: res+=prefix+("auto_id_policy: %s\n" % self.DebugFormatInt32(self.auto_id_policy_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 11
kentity = 1
ktransaction = 2
kcomposite_index = 3
ktrusted = 4
kforce = 7
kmark_changes = 8
ksnapshot = 9
kauto_id_policy = 10
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "entity",
2: "transaction",
3: "composite_index",
4: "trusted",
7: "force",
8: "mark_changes",
9: "snapshot",
10: "auto_id_policy",
11: "header",
}, 11)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.STRING,
}, 11, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutRequest'
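# PutResponse: reply to Put.  Returns the repeated written keys, an optional
# Cost, and a repeated list of int64 versions.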
class PutResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.key_ = []
self.version_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def set_version(self, i, x):
self.version_[i] = x
def add_version(self, x):
self.version_.append(x)
def clear_version(self):
self.version_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
for i in xrange(x.version_size()): self.add_version(x.version(i))
def Equals(self, x):
if x is self: return 1
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
return n
def Clear(self):
self.clear_key()
self.clear_cost()
self.clear_version()
def OutputUnchecked(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
if (self.has_cost_):
out.putVarInt32(18)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
for i in xrange(len(self.version_)):
out.putVarInt32(24)
out.putVarInt64(self.version_[i])
def OutputPartial(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
if (self.has_cost_):
out.putVarInt32(18)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
for i in xrange(len(self.version_)):
out.putVarInt32(24)
out.putVarInt64(self.version_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if tt == 24:
self.add_version(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.key_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("key%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.version_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkey = 1
kcost = 2
kversion = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "cost",
3: "version",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutResponse'
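# TouchRequest: request message for the datastore v3 Touch call.  Carries
# the repeated keys to touch plus an optional header, repeated
# composite_index and snapshot messages, and a force flag.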
class TouchRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_force_ = 0
force_ = 0
def __init__(self, contents=None):
self.key_ = []
self.composite_index_ = []
self.snapshot_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def force(self): return self.force_
def set_force(self, x):
self.has_force_ = 1
self.force_ = x
def clear_force(self):
if self.has_force_:
self.has_force_ = 0
self.force_ = 0
def has_force(self): return self.has_force_
def snapshot_size(self): return len(self.snapshot_)
def snapshot_list(self): return self.snapshot_
def snapshot(self, i):
return self.snapshot_[i]
def mutable_snapshot(self, i):
return self.snapshot_[i]
def add_snapshot(self):
x = Snapshot()
self.snapshot_.append(x)
return x
def clear_snapshot(self):
self.snapshot_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_force()): self.set_force(x.force())
for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_force_ != x.has_force_: return 0
if self.has_force_ and self.force_ != x.force_: return 0
if len(self.snapshot_) != len(x.snapshot_): return 0
for e1, e2 in zip(self.snapshot_, x.snapshot_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.snapshot_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_force_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_force_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_header()
self.clear_key()
self.clear_composite_index()
self.clear_force()
self.clear_snapshot()
def OutputUnchecked(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(18)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
if (self.has_force_):
out.putVarInt32(24)
out.putBoolean(self.force_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSize())
self.snapshot_[i].OutputUnchecked(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(18)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
if (self.has_force_):
out.putVarInt32(24)
out.putBoolean(self.force_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSizePartial())
self.snapshot_[i].OutputPartial(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_composite_index().TryMerge(tmp)
continue
if tt == 24:
self.set_force(d.getBoolean())
continue
if tt == 74:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_snapshot().TryMerge(tmp)
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.key_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("key%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
cnt=0
for e in self.composite_index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("composite_index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
cnt=0
for e in self.snapshot_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("snapshot%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 10
kkey = 1
kcomposite_index = 2
kforce = 3
ksnapshot = 9
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "composite_index",
3: "force",
9: "snapshot",
10: "header",
}, 10)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
}, 10, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchRequest'
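# TouchResponse: reply to Touch; its only field is an optional Cost.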
class TouchResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def MergeFrom(self, x):
assert x is not self
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
def Equals(self, x):
if x is self: return 1
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
return n
def Clear(self):
self.clear_cost()
def OutputUnchecked(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcost = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cost",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchResponse'
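# DeleteRequest: request message for the datastore v3 Delete call.  Its
# shape mirrors PutRequest: repeated keys, optional header and transaction,
# repeated composite_index and snapshot messages, and
# trusted/force/mark_changes flags.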
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_transaction_ = 0
transaction_ = None
has_trusted_ = 0
trusted_ = 0
has_force_ = 0
force_ = 0
has_mark_changes_ = 0
mark_changes_ = 0
def __init__(self, contents=None):
self.key_ = []
self.composite_index_ = []
self.snapshot_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def trusted(self): return self.trusted_
def set_trusted(self, x):
self.has_trusted_ = 1
self.trusted_ = x
def clear_trusted(self):
if self.has_trusted_:
self.has_trusted_ = 0
self.trusted_ = 0
def has_trusted(self): return self.has_trusted_
def force(self): return self.force_
def set_force(self, x):
self.has_force_ = 1
self.force_ = x
def clear_force(self):
if self.has_force_:
self.has_force_ = 0
self.force_ = 0
def has_force(self): return self.has_force_
def mark_changes(self): return self.mark_changes_
def set_mark_changes(self, x):
self.has_mark_changes_ = 1
self.mark_changes_ = x
def clear_mark_changes(self):
if self.has_mark_changes_:
self.has_mark_changes_ = 0
self.mark_changes_ = 0
def has_mark_changes(self): return self.has_mark_changes_
def snapshot_size(self): return len(self.snapshot_)
def snapshot_list(self): return self.snapshot_
def snapshot(self, i):
return self.snapshot_[i]
def mutable_snapshot(self, i):
return self.snapshot_[i]
def add_snapshot(self):
x = Snapshot()
self.snapshot_.append(x)
return x
def clear_snapshot(self):
self.snapshot_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_trusted()): self.set_trusted(x.trusted())
if (x.has_force()): self.set_force(x.force())
if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_trusted_ != x.has_trusted_: return 0
if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
if self.has_force_ != x.has_force_: return 0
if self.has_force_ and self.force_ != x.force_: return 0
if self.has_mark_changes_ != x.has_mark_changes_: return 0
if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
if len(self.snapshot_) != len(x.snapshot_): return 0
for e1, e2 in zip(self.snapshot_, x.snapshot_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.snapshot_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_trusted_): n += 2
if (self.has_force_): n += 2
if (self.has_mark_changes_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_trusted_): n += 2
if (self.has_force_): n += 2
if (self.has_mark_changes_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_header()
self.clear_key()
self.clear_transaction()
self.clear_composite_index()
self.clear_trusted()
self.clear_force()
self.clear_mark_changes()
self.clear_snapshot()
def OutputUnchecked(self, out):
if (self.has_trusted_):
out.putVarInt32(32)
out.putBoolean(self.trusted_)
if (self.has_transaction_):
out.putVarInt32(42)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
for i in xrange(len(self.key_)):
out.putVarInt32(50)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
if (self.has_force_):
out.putVarInt32(56)
out.putBoolean(self.force_)
if (self.has_mark_changes_):
out.putVarInt32(64)
out.putBoolean(self.mark_changes_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSize())
self.snapshot_[i].OutputUnchecked(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(90)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_trusted_):
out.putVarInt32(32)
out.putBoolean(self.trusted_)
if (self.has_transaction_):
out.putVarInt32(42)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
for i in xrange(len(self.key_)):
out.putVarInt32(50)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
if (self.has_force_):
out.putVarInt32(56)
out.putBoolean(self.force_)
if (self.has_mark_changes_):
out.putVarInt32(64)
out.putBoolean(self.mark_changes_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSizePartial())
self.snapshot_[i].OutputPartial(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(90)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 32:
self.set_trusted(d.getBoolean())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 56:
self.set_force(d.getBoolean())
continue
if tt == 64:
self.set_mark_changes(d.getBoolean())
continue
if tt == 74:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_snapshot().TryMerge(tmp)
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if tt == 90:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_composite_index().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.key_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("key%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.composite_index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("composite_index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
cnt=0
for e in self.snapshot_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("snapshot%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 10
kkey = 6
ktransaction = 5
kcomposite_index = 11
ktrusted = 4
kforce = 7
kmark_changes = 8
ksnapshot = 9
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
4: "trusted",
5: "transaction",
6: "key",
7: "force",
8: "mark_changes",
9: "snapshot",
10: "header",
11: "composite_index",
}, 11)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.STRING,
}, 11, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteRequest'
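# DeleteResponse: reply to Delete.  Returns an optional Cost and a repeated
# list of int64 versions.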
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.version_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def set_version(self, i, x):
self.version_[i] = x
def add_version(self, x):
self.version_.append(x)
def clear_version(self):
self.version_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
for i in xrange(x.version_size()): self.add_version(x.version(i))
def Equals(self, x):
if x is self: return 1
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
return n
def ByteSizePartial(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
return n
def Clear(self):
self.clear_cost()
self.clear_version()
def OutputUnchecked(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
for i in xrange(len(self.version_)):
out.putVarInt32(24)
out.putVarInt64(self.version_[i])
def OutputPartial(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
for i in xrange(len(self.version_)):
out.putVarInt32(24)
out.putVarInt64(self.version_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if tt == 24:
self.add_version(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.version_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcost = 1
kversion = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cost",
3: "version",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteResponse'
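# NextRequest: request message for the datastore v3 Next call, used to fetch
# further results from an open query cursor.  cursor is the only required
# field; count, offset and compile are optional.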
class NextRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_cursor_ = 0
has_count_ = 0
count_ = 0
has_offset_ = 0
offset_ = 0
has_compile_ = 0
compile_ = 0
def __init__(self, contents=None):
self.cursor_ = Cursor()
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def cursor(self): return self.cursor_
def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_
def clear_cursor(self):self.has_cursor_ = 0; self.cursor_.Clear()
def has_cursor(self): return self.has_cursor_
def count(self): return self.count_
def set_count(self, x):
self.has_count_ = 1
self.count_ = x
def clear_count(self):
if self.has_count_:
self.has_count_ = 0
self.count_ = 0
def has_count(self): return self.has_count_
def offset(self): return self.offset_
def set_offset(self, x):
self.has_offset_ = 1
self.offset_ = x
def clear_offset(self):
if self.has_offset_:
self.has_offset_ = 0
self.offset_ = 0
def has_offset(self): return self.has_offset_
def compile(self): return self.compile_
def set_compile(self, x):
self.has_compile_ = 1
self.compile_ = x
def clear_compile(self):
if self.has_compile_:
self.has_compile_ = 0
self.compile_ = 0
def has_compile(self): return self.has_compile_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
if (x.has_count()): self.set_count(x.count())
if (x.has_offset()): self.set_offset(x.offset())
if (x.has_compile()): self.set_compile(x.compile())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_cursor_ != x.has_cursor_: return 0
if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
if self.has_count_ != x.has_count_: return 0
if self.has_count_ and self.count_ != x.count_: return 0
if self.has_offset_ != x.has_offset_: return 0
if self.has_offset_ and self.offset_ != x.offset_: return 0
if self.has_compile_ != x.has_compile_: return 0
if self.has_compile_ and self.compile_ != x.compile_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_cursor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: cursor not set.')
elif not self.cursor_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(self.cursor_.ByteSize())
if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_compile_): n += 2
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_cursor_):
n += 1
n += self.lengthString(self.cursor_.ByteSizePartial())
if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_compile_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_cursor()
self.clear_count()
self.clear_offset()
self.clear_compile()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putVarInt32(self.cursor_.ByteSize())
self.cursor_.OutputUnchecked(out)
if (self.has_count_):
out.putVarInt32(16)
out.putVarInt32(self.count_)
if (self.has_compile_):
out.putVarInt32(24)
out.putBoolean(self.compile_)
if (self.has_offset_):
out.putVarInt32(32)
out.putVarInt32(self.offset_)
if (self.has_header_):
out.putVarInt32(42)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_cursor_):
out.putVarInt32(10)
out.putVarInt32(self.cursor_.ByteSizePartial())
self.cursor_.OutputPartial(out)
if (self.has_count_):
out.putVarInt32(16)
out.putVarInt32(self.count_)
if (self.has_compile_):
out.putVarInt32(24)
out.putBoolean(self.compile_)
if (self.has_offset_):
out.putVarInt32(32)
out.putVarInt32(self.offset_)
if (self.has_header_):
out.putVarInt32(42)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cursor().TryMerge(tmp)
continue
if tt == 16:
self.set_count(d.getVarInt32())
continue
if tt == 24:
self.set_compile(d.getBoolean())
continue
if tt == 32:
self.set_offset(d.getVarInt32())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_cursor_:
res+=prefix+"cursor <\n"
res+=self.cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 5
kcursor = 1
kcount = 2
koffset = 4
kcompile = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cursor",
2: "count",
3: "compile",
4: "offset",
5: "header",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.NextRequest'
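# Usage sketch (illustrative only; not emitted by the protobuf generator).
# NextRequest fetches more results from an open query cursor: `cursor` is the
# only required field, while `count`, `offset` and `compile` are optional
# batching/compilation knobs, as the accessors above show. A caller might do:
#
#   req = NextRequest()
#   req.mutable_cursor().CopyFrom(query_result.cursor())  # cursor from a QueryResult
#   req.set_count(20)
#   payload = req.Encode()  # Encode()/MergeFromString() assumed from the ProtocolMessage base class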
class QueryResult(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
cursor_ = None
has_skipped_results_ = 0
skipped_results_ = 0
has_more_results_ = 0
more_results_ = 0
has_keys_only_ = 0
keys_only_ = 0
has_index_only_ = 0
index_only_ = 0
has_small_ops_ = 0
small_ops_ = 0
has_compiled_query_ = 0
compiled_query_ = None
has_compiled_cursor_ = 0
compiled_cursor_ = None
has_skipped_results_compiled_cursor_ = 0
skipped_results_compiled_cursor_ = None
def __init__(self, contents=None):
self.result_ = []
self.index_ = []
self.version_ = []
self.result_compiled_cursor_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cursor(self):
if self.cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cursor_ is None: self.cursor_ = Cursor()
finally:
self.lazy_init_lock_.release()
return self.cursor_
def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor()
def clear_cursor(self):
if self.has_cursor_:
self.has_cursor_ = 0;
if self.cursor_ is not None: self.cursor_.Clear()
def has_cursor(self): return self.has_cursor_
def result_size(self): return len(self.result_)
def result_list(self): return self.result_
def result(self, i):
return self.result_[i]
def mutable_result(self, i):
return self.result_[i]
def add_result(self):
x = EntityProto()
self.result_.append(x)
return x
def clear_result(self):
self.result_ = []
def skipped_results(self): return self.skipped_results_
def set_skipped_results(self, x):
self.has_skipped_results_ = 1
self.skipped_results_ = x
def clear_skipped_results(self):
if self.has_skipped_results_:
self.has_skipped_results_ = 0
self.skipped_results_ = 0
def has_skipped_results(self): return self.has_skipped_results_
def more_results(self): return self.more_results_
def set_more_results(self, x):
self.has_more_results_ = 1
self.more_results_ = x
def clear_more_results(self):
if self.has_more_results_:
self.has_more_results_ = 0
self.more_results_ = 0
def has_more_results(self): return self.has_more_results_
def keys_only(self): return self.keys_only_
def set_keys_only(self, x):
self.has_keys_only_ = 1
self.keys_only_ = x
def clear_keys_only(self):
if self.has_keys_only_:
self.has_keys_only_ = 0
self.keys_only_ = 0
def has_keys_only(self): return self.has_keys_only_
def index_only(self): return self.index_only_
def set_index_only(self, x):
self.has_index_only_ = 1
self.index_only_ = x
def clear_index_only(self):
if self.has_index_only_:
self.has_index_only_ = 0
self.index_only_ = 0
def has_index_only(self): return self.has_index_only_
def small_ops(self): return self.small_ops_
def set_small_ops(self, x):
self.has_small_ops_ = 1
self.small_ops_ = x
def clear_small_ops(self):
if self.has_small_ops_:
self.has_small_ops_ = 0
self.small_ops_ = 0
def has_small_ops(self): return self.has_small_ops_
def compiled_query(self):
if self.compiled_query_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery()
finally:
self.lazy_init_lock_.release()
return self.compiled_query_
def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query()
def clear_compiled_query(self):
if self.has_compiled_query_:
self.has_compiled_query_ = 0;
if self.compiled_query_ is not None: self.compiled_query_.Clear()
def has_compiled_query(self): return self.has_compiled_query_
def compiled_cursor(self):
if self.compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.compiled_cursor_
def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
def clear_compiled_cursor(self):
if self.has_compiled_cursor_:
self.has_compiled_cursor_ = 0;
if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
def has_compiled_cursor(self): return self.has_compiled_cursor_
def index_size(self): return len(self.index_)
def index_list(self): return self.index_
def index(self, i):
return self.index_[i]
def mutable_index(self, i):
return self.index_[i]
def add_index(self):
x = CompositeIndex()
self.index_.append(x)
return x
def clear_index(self):
self.index_ = []
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def set_version(self, i, x):
self.version_[i] = x
def add_version(self, x):
self.version_.append(x)
def clear_version(self):
self.version_ = []
def result_compiled_cursor_size(self): return len(self.result_compiled_cursor_)
def result_compiled_cursor_list(self): return self.result_compiled_cursor_
def result_compiled_cursor(self, i):
return self.result_compiled_cursor_[i]
def mutable_result_compiled_cursor(self, i):
return self.result_compiled_cursor_[i]
def add_result_compiled_cursor(self):
x = CompiledCursor()
self.result_compiled_cursor_.append(x)
return x
def clear_result_compiled_cursor(self):
self.result_compiled_cursor_ = []
def skipped_results_compiled_cursor(self):
if self.skipped_results_compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.skipped_results_compiled_cursor_ is None: self.skipped_results_compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.skipped_results_compiled_cursor_
def mutable_skipped_results_compiled_cursor(self): self.has_skipped_results_compiled_cursor_ = 1; return self.skipped_results_compiled_cursor()
def clear_skipped_results_compiled_cursor(self):
if self.has_skipped_results_compiled_cursor_:
self.has_skipped_results_compiled_cursor_ = 0;
if self.skipped_results_compiled_cursor_ is not None: self.skipped_results_compiled_cursor_.Clear()
def has_skipped_results_compiled_cursor(self): return self.has_skipped_results_compiled_cursor_
def MergeFrom(self, x):
assert x is not self
if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
if (x.has_skipped_results()): self.set_skipped_results(x.skipped_results())
if (x.has_more_results()): self.set_more_results(x.more_results())
if (x.has_keys_only()): self.set_keys_only(x.keys_only())
if (x.has_index_only()): self.set_index_only(x.index_only())
if (x.has_small_ops()): self.set_small_ops(x.small_ops())
if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
for i in xrange(x.version_size()): self.add_version(x.version(i))
for i in xrange(x.result_compiled_cursor_size()): self.add_result_compiled_cursor().CopyFrom(x.result_compiled_cursor(i))
if (x.has_skipped_results_compiled_cursor()): self.mutable_skipped_results_compiled_cursor().MergeFrom(x.skipped_results_compiled_cursor())
def Equals(self, x):
if x is self: return 1
if self.has_cursor_ != x.has_cursor_: return 0
if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
if len(self.result_) != len(x.result_): return 0
for e1, e2 in zip(self.result_, x.result_):
if e1 != e2: return 0
if self.has_skipped_results_ != x.has_skipped_results_: return 0
if self.has_skipped_results_ and self.skipped_results_ != x.skipped_results_: return 0
if self.has_more_results_ != x.has_more_results_: return 0
if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
if self.has_keys_only_ != x.has_keys_only_: return 0
if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
if self.has_index_only_ != x.has_index_only_: return 0
if self.has_index_only_ and self.index_only_ != x.index_only_: return 0
if self.has_small_ops_ != x.has_small_ops_: return 0
if self.has_small_ops_ and self.small_ops_ != x.small_ops_: return 0
if self.has_compiled_query_ != x.has_compiled_query_: return 0
if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
if len(self.index_) != len(x.index_): return 0
for e1, e2 in zip(self.index_, x.index_):
if e1 != e2: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
if len(self.result_compiled_cursor_) != len(x.result_compiled_cursor_): return 0
for e1, e2 in zip(self.result_compiled_cursor_, x.result_compiled_cursor_):
if e1 != e2: return 0
if self.has_skipped_results_compiled_cursor_ != x.has_skipped_results_compiled_cursor_: return 0
if self.has_skipped_results_compiled_cursor_ and self.skipped_results_compiled_cursor_ != x.skipped_results_compiled_cursor_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0
for p in self.result_:
if not p.IsInitialized(debug_strs): initialized=0
if (not self.has_more_results_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: more_results not set.')
if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0
if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
for p in self.index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.result_compiled_cursor_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_skipped_results_compiled_cursor_ and not self.skipped_results_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
n += 1 * len(self.result_)
for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
if (self.has_keys_only_): n += 2
if (self.has_index_only_): n += 2
if (self.has_small_ops_): n += 2
if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize())
if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSize())
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
n += 1 * len(self.result_compiled_cursor_)
for i in xrange(len(self.result_compiled_cursor_)): n += self.lengthString(self.result_compiled_cursor_[i].ByteSize())
if (self.has_skipped_results_compiled_cursor_): n += 1 + self.lengthString(self.skipped_results_compiled_cursor_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSizePartial())
n += 1 * len(self.result_)
for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSizePartial())
if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
if (self.has_more_results_):
n += 2
if (self.has_keys_only_): n += 2
if (self.has_index_only_): n += 2
if (self.has_small_ops_): n += 2
if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSizePartial())
if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
n += 1 * len(self.result_compiled_cursor_)
for i in xrange(len(self.result_compiled_cursor_)): n += self.lengthString(self.result_compiled_cursor_[i].ByteSizePartial())
if (self.has_skipped_results_compiled_cursor_): n += 1 + self.lengthString(self.skipped_results_compiled_cursor_.ByteSizePartial())
return n
def Clear(self):
self.clear_cursor()
self.clear_result()
self.clear_skipped_results()
self.clear_more_results()
self.clear_keys_only()
self.clear_index_only()
self.clear_small_ops()
self.clear_compiled_query()
self.clear_compiled_cursor()
self.clear_index()
self.clear_version()
self.clear_result_compiled_cursor()
self.clear_skipped_results_compiled_cursor()
def OutputUnchecked(self, out):
if (self.has_cursor_):
out.putVarInt32(10)
out.putVarInt32(self.cursor_.ByteSize())
self.cursor_.OutputUnchecked(out)
for i in xrange(len(self.result_)):
out.putVarInt32(18)
out.putVarInt32(self.result_[i].ByteSize())
self.result_[i].OutputUnchecked(out)
out.putVarInt32(24)
out.putBoolean(self.more_results_)
if (self.has_keys_only_):
out.putVarInt32(32)
out.putBoolean(self.keys_only_)
if (self.has_compiled_query_):
out.putVarInt32(42)
out.putVarInt32(self.compiled_query_.ByteSize())
self.compiled_query_.OutputUnchecked(out)
if (self.has_compiled_cursor_):
out.putVarInt32(50)
out.putVarInt32(self.compiled_cursor_.ByteSize())
self.compiled_cursor_.OutputUnchecked(out)
if (self.has_skipped_results_):
out.putVarInt32(56)
out.putVarInt32(self.skipped_results_)
for i in xrange(len(self.index_)):
out.putVarInt32(66)
out.putVarInt32(self.index_[i].ByteSize())
self.index_[i].OutputUnchecked(out)
if (self.has_index_only_):
out.putVarInt32(72)
out.putBoolean(self.index_only_)
if (self.has_small_ops_):
out.putVarInt32(80)
out.putBoolean(self.small_ops_)
for i in xrange(len(self.version_)):
out.putVarInt32(88)
out.putVarInt64(self.version_[i])
for i in xrange(len(self.result_compiled_cursor_)):
out.putVarInt32(98)
out.putVarInt32(self.result_compiled_cursor_[i].ByteSize())
self.result_compiled_cursor_[i].OutputUnchecked(out)
if (self.has_skipped_results_compiled_cursor_):
out.putVarInt32(106)
out.putVarInt32(self.skipped_results_compiled_cursor_.ByteSize())
self.skipped_results_compiled_cursor_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_cursor_):
out.putVarInt32(10)
out.putVarInt32(self.cursor_.ByteSizePartial())
self.cursor_.OutputPartial(out)
for i in xrange(len(self.result_)):
out.putVarInt32(18)
out.putVarInt32(self.result_[i].ByteSizePartial())
self.result_[i].OutputPartial(out)
if (self.has_more_results_):
out.putVarInt32(24)
out.putBoolean(self.more_results_)
if (self.has_keys_only_):
out.putVarInt32(32)
out.putBoolean(self.keys_only_)
if (self.has_compiled_query_):
out.putVarInt32(42)
out.putVarInt32(self.compiled_query_.ByteSizePartial())
self.compiled_query_.OutputPartial(out)
if (self.has_compiled_cursor_):
out.putVarInt32(50)
out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
self.compiled_cursor_.OutputPartial(out)
if (self.has_skipped_results_):
out.putVarInt32(56)
out.putVarInt32(self.skipped_results_)
for i in xrange(len(self.index_)):
out.putVarInt32(66)
out.putVarInt32(self.index_[i].ByteSizePartial())
self.index_[i].OutputPartial(out)
if (self.has_index_only_):
out.putVarInt32(72)
out.putBoolean(self.index_only_)
if (self.has_small_ops_):
out.putVarInt32(80)
out.putBoolean(self.small_ops_)
for i in xrange(len(self.version_)):
out.putVarInt32(88)
out.putVarInt64(self.version_[i])
for i in xrange(len(self.result_compiled_cursor_)):
out.putVarInt32(98)
out.putVarInt32(self.result_compiled_cursor_[i].ByteSizePartial())
self.result_compiled_cursor_[i].OutputPartial(out)
if (self.has_skipped_results_compiled_cursor_):
out.putVarInt32(106)
out.putVarInt32(self.skipped_results_compiled_cursor_.ByteSizePartial())
self.skipped_results_compiled_cursor_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cursor().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_result().TryMerge(tmp)
continue
if tt == 24:
self.set_more_results(d.getBoolean())
continue
if tt == 32:
self.set_keys_only(d.getBoolean())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_compiled_query().TryMerge(tmp)
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_compiled_cursor().TryMerge(tmp)
continue
if tt == 56:
self.set_skipped_results(d.getVarInt32())
continue
if tt == 66:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_index().TryMerge(tmp)
continue
if tt == 72:
self.set_index_only(d.getBoolean())
continue
if tt == 80:
self.set_small_ops(d.getBoolean())
continue
if tt == 88:
self.add_version(d.getVarInt64())
continue
if tt == 98:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_result_compiled_cursor().TryMerge(tmp)
continue
if tt == 106:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_skipped_results_compiled_cursor().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cursor_:
res+=prefix+"cursor <\n"
res+=self.cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.result_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("result%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_skipped_results_: res+=prefix+("skipped_results: %s\n" % self.DebugFormatInt32(self.skipped_results_))
if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
if self.has_index_only_: res+=prefix+("index_only: %s\n" % self.DebugFormatBool(self.index_only_))
if self.has_small_ops_: res+=prefix+("small_ops: %s\n" % self.DebugFormatBool(self.small_ops_))
if self.has_compiled_query_:
res+=prefix+"compiled_query <\n"
res+=self.compiled_query_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_compiled_cursor_:
res+=prefix+"compiled_cursor <\n"
res+=self.compiled_cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
cnt=0
for e in self.version_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
cnt+=1
cnt=0
for e in self.result_compiled_cursor_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("result_compiled_cursor%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_skipped_results_compiled_cursor_:
res+=prefix+"skipped_results_compiled_cursor <\n"
res+=self.skipped_results_compiled_cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcursor = 1
kresult = 2
kskipped_results = 7
kmore_results = 3
kkeys_only = 4
kindex_only = 9
ksmall_ops = 10
kcompiled_query = 5
kcompiled_cursor = 6
kindex = 8
kversion = 11
kresult_compiled_cursor = 12
kskipped_results_compiled_cursor = 13
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cursor",
2: "result",
3: "more_results",
4: "keys_only",
5: "compiled_query",
6: "compiled_cursor",
7: "skipped_results",
8: "index",
9: "index_only",
10: "small_ops",
11: "version",
12: "result_compiled_cursor",
13: "skipped_results_compiled_cursor",
}, 13)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.NUMERIC,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.NUMERIC,
12: ProtocolBuffer.Encoder.STRING,
13: ProtocolBuffer.Encoder.STRING,
}, 13, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.QueryResult'
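# Usage sketch (illustrative only). QueryResult carries one batch of query
# output: repeated `result` entities, the required `more_results` flag,
# optional `skipped_results`, the `keys_only`/`index_only`/`small_ops` flags,
# and optional compiled query/cursor data, per the accessors above. Reading a
# batch might look like:
#
#   for entity in query_result.result_list():   # each element is an EntityProto
#       pass
#   if query_result.more_results():
#       next_req = NextRequest()
#       next_req.mutable_cursor().CopyFrom(query_result.cursor())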
class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_model_key_ = 0
model_key_ = None
has_size_ = 0
size_ = 0
has_max_ = 0
max_ = 0
has_trusted_ = 0
trusted_ = 0
def __init__(self, contents=None):
self.reserve_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def model_key(self):
if self.model_key_ is None:
self.lazy_init_lock_.acquire()
try:
if self.model_key_ is None: self.model_key_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.model_key_
def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key()
def clear_model_key(self):
if self.has_model_key_:
self.has_model_key_ = 0;
if self.model_key_ is not None: self.model_key_.Clear()
def has_model_key(self): return self.has_model_key_
def size(self): return self.size_
def set_size(self, x):
self.has_size_ = 1
self.size_ = x
def clear_size(self):
if self.has_size_:
self.has_size_ = 0
self.size_ = 0
def has_size(self): return self.has_size_
def max(self): return self.max_
def set_max(self, x):
self.has_max_ = 1
self.max_ = x
def clear_max(self):
if self.has_max_:
self.has_max_ = 0
self.max_ = 0
def has_max(self): return self.has_max_
def reserve_size(self): return len(self.reserve_)
def reserve_list(self): return self.reserve_
def reserve(self, i):
return self.reserve_[i]
def mutable_reserve(self, i):
return self.reserve_[i]
def add_reserve(self):
x = Reference()
self.reserve_.append(x)
return x
def clear_reserve(self):
self.reserve_ = []
def trusted(self): return self.trusted_
def set_trusted(self, x):
self.has_trusted_ = 1
self.trusted_ = x
def clear_trusted(self):
if self.has_trusted_:
self.has_trusted_ = 0
self.trusted_ = 0
def has_trusted(self): return self.has_trusted_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key())
if (x.has_size()): self.set_size(x.size())
if (x.has_max()): self.set_max(x.max())
for i in xrange(x.reserve_size()): self.add_reserve().CopyFrom(x.reserve(i))
if (x.has_trusted()): self.set_trusted(x.trusted())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_model_key_ != x.has_model_key_: return 0
if self.has_model_key_ and self.model_key_ != x.model_key_: return 0
if self.has_size_ != x.has_size_: return 0
if self.has_size_ and self.size_ != x.size_: return 0
if self.has_max_ != x.has_max_: return 0
if self.has_max_ and self.max_ != x.max_: return 0
if len(self.reserve_) != len(x.reserve_): return 0
for e1, e2 in zip(self.reserve_, x.reserve_):
if e1 != e2: return 0
if self.has_trusted_ != x.has_trusted_: return 0
if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (self.has_model_key_ and not self.model_key_.IsInitialized(debug_strs)): initialized = 0
for p in self.reserve_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSize())
if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
n += 1 * len(self.reserve_)
for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSize())
if (self.has_trusted_): n += 2
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSizePartial())
if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
n += 1 * len(self.reserve_)
for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSizePartial())
if (self.has_trusted_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_model_key()
self.clear_size()
self.clear_max()
self.clear_reserve()
self.clear_trusted()
def OutputUnchecked(self, out):
if (self.has_model_key_):
out.putVarInt32(10)
out.putVarInt32(self.model_key_.ByteSize())
self.model_key_.OutputUnchecked(out)
if (self.has_size_):
out.putVarInt32(16)
out.putVarInt64(self.size_)
if (self.has_max_):
out.putVarInt32(24)
out.putVarInt64(self.max_)
if (self.has_header_):
out.putVarInt32(34)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
for i in xrange(len(self.reserve_)):
out.putVarInt32(42)
out.putVarInt32(self.reserve_[i].ByteSize())
self.reserve_[i].OutputUnchecked(out)
if (self.has_trusted_):
out.putVarInt32(48)
out.putBoolean(self.trusted_)
def OutputPartial(self, out):
if (self.has_model_key_):
out.putVarInt32(10)
out.putVarInt32(self.model_key_.ByteSizePartial())
self.model_key_.OutputPartial(out)
if (self.has_size_):
out.putVarInt32(16)
out.putVarInt64(self.size_)
if (self.has_max_):
out.putVarInt32(24)
out.putVarInt64(self.max_)
if (self.has_header_):
out.putVarInt32(34)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
for i in xrange(len(self.reserve_)):
out.putVarInt32(42)
out.putVarInt32(self.reserve_[i].ByteSizePartial())
self.reserve_[i].OutputPartial(out)
if (self.has_trusted_):
out.putVarInt32(48)
out.putBoolean(self.trusted_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_model_key().TryMerge(tmp)
continue
if tt == 16:
self.set_size(d.getVarInt64())
continue
if tt == 24:
self.set_max(d.getVarInt64())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_reserve().TryMerge(tmp)
continue
if tt == 48:
self.set_trusted(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_model_key_:
res+=prefix+"model_key <\n"
res+=self.model_key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_))
if self.has_max_: res+=prefix+("max: %s\n" % self.DebugFormatInt64(self.max_))
cnt=0
for e in self.reserve_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("reserve%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 4
kmodel_key = 1
ksize = 2
kmax = 3
kreserve = 5
ktrusted = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "model_key",
2: "size",
3: "max",
4: "header",
5: "reserve",
6: "trusted",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsRequest'
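# Usage sketch (illustrative only). AllocateIdsRequest asks the datastore to
# reserve numeric ids: `model_key` names the target key path, `size` or `max`
# bounds the allocation, and the repeated `reserve` keys pin specific ids, as
# defined above. For example:
#
#   req = AllocateIdsRequest()
#   req.mutable_model_key().CopyFrom(some_reference)  # some_reference: a Reference message (hypothetical)
#   req.set_size(10)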
class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
has_start_ = 0
start_ = 0
has_end_ = 0
end_ = 0
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def start(self): return self.start_
def set_start(self, x):
self.has_start_ = 1
self.start_ = x
def clear_start(self):
if self.has_start_:
self.has_start_ = 0
self.start_ = 0
def has_start(self): return self.has_start_
def end(self): return self.end_
def set_end(self, x):
self.has_end_ = 1
self.end_ = x
def clear_end(self):
if self.has_end_:
self.has_end_ = 0
self.end_ = 0
def has_end(self): return self.has_end_
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def MergeFrom(self, x):
assert x is not self
if (x.has_start()): self.set_start(x.start())
if (x.has_end()): self.set_end(x.end())
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
def Equals(self, x):
if x is self: return 1
if self.has_start_ != x.has_start_: return 0
if self.has_start_ and self.start_ != x.start_: return 0
if self.has_end_ != x.has_end_: return 0
if self.has_end_ and self.end_ != x.end_: return 0
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_start_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: start not set.')
if (not self.has_end_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: end not set.')
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.start_)
n += self.lengthVarInt64(self.end_)
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_start_):
n += 1
n += self.lengthVarInt64(self.start_)
if (self.has_end_):
n += 1
n += self.lengthVarInt64(self.end_)
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
return n
def Clear(self):
self.clear_start()
self.clear_end()
self.clear_cost()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt64(self.start_)
out.putVarInt32(16)
out.putVarInt64(self.end_)
if (self.has_cost_):
out.putVarInt32(26)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_start_):
out.putVarInt32(8)
out.putVarInt64(self.start_)
if (self.has_end_):
out.putVarInt32(16)
out.putVarInt64(self.end_)
if (self.has_cost_):
out.putVarInt32(26)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_start(d.getVarInt64())
continue
if tt == 16:
self.set_end(d.getVarInt64())
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_))
if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_))
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kstart = 1
kend = 2
kcost = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "start",
2: "end",
3: "cost",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsResponse'
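# Usage sketch (illustrative only). AllocateIdsResponse returns the reserved id
# range as the required `start`/`end` pair plus an optional `cost`:
#
#   resp = AllocateIdsResponse(payload)   # payload: wire bytes (hypothetical); parsed via MergeFromString
#   first_id, last_id = resp.start(), resp.end()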
class CompositeIndices(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.index_ = []
if contents is not None: self.MergeFromString(contents)
def index_size(self): return len(self.index_)
def index_list(self): return self.index_
def index(self, i):
return self.index_[i]
def mutable_index(self, i):
return self.index_[i]
def add_index(self):
x = CompositeIndex()
self.index_.append(x)
return x
def clear_index(self):
self.index_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
def Equals(self, x):
if x is self: return 1
if len(self.index_) != len(x.index_): return 0
for e1, e2 in zip(self.index_, x.index_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.index_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_index()
def OutputUnchecked(self, out):
for i in xrange(len(self.index_)):
out.putVarInt32(10)
out.putVarInt32(self.index_[i].ByteSize())
self.index_[i].OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.index_)):
out.putVarInt32(10)
out.putVarInt32(self.index_[i].ByteSizePartial())
self.index_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_index().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kindex = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "index",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompositeIndices'
class AddActionsRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_transaction_ = 0
def __init__(self, contents=None):
self.transaction_ = Transaction()
self.action_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def transaction(self): return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_
def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def action_size(self): return len(self.action_)
def action_list(self): return self.action_
def action(self, i):
return self.action_[i]
def mutable_action(self, i):
return self.action_[i]
def add_action(self):
x = Action()
self.action_.append(x)
return x
def clear_action(self):
self.action_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
for i in xrange(x.action_size()): self.add_action().CopyFrom(x.action(i))
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if len(self.action_) != len(x.action_): return 0
for e1, e2 in zip(self.action_, x.action_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_transaction_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: transaction not set.')
elif not self.transaction_.IsInitialized(debug_strs): initialized = 0
for p in self.action_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(self.transaction_.ByteSize())
n += 1 * len(self.action_)
for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_transaction_):
n += 1
n += self.lengthString(self.transaction_.ByteSizePartial())
n += 1 * len(self.action_)
for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_header()
self.clear_transaction()
self.clear_action()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
for i in xrange(len(self.action_)):
out.putVarInt32(18)
out.putVarInt32(self.action_[i].ByteSize())
self.action_[i].OutputUnchecked(out)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_transaction_):
out.putVarInt32(10)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
for i in xrange(len(self.action_)):
out.putVarInt32(18)
out.putVarInt32(self.action_[i].ByteSizePartial())
self.action_[i].OutputPartial(out)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_action().TryMerge(tmp)
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.action_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("action%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 3
ktransaction = 1
kaction = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "transaction",
2: "action",
3: "header",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsRequest'
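# Usage sketch (illustrative only). AddActionsRequest associates repeated
# `action` messages with a required `transaction`, per the accessors above:
#
#   req = AddActionsRequest()
#   req.mutable_transaction().CopyFrom(txn)     # txn: a Transaction message (hypothetical)
#   req.add_action().CopyFrom(action_proto)     # action_proto: an Action message (hypothetical)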
class AddActionsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsResponse'
class BeginTransactionRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_app_ = 0
app_ = ""
has_allow_multiple_eg_ = 0
allow_multiple_eg_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def allow_multiple_eg(self): return self.allow_multiple_eg_
def set_allow_multiple_eg(self, x):
self.has_allow_multiple_eg_ = 1
self.allow_multiple_eg_ = x
def clear_allow_multiple_eg(self):
if self.has_allow_multiple_eg_:
self.has_allow_multiple_eg_ = 0
self.allow_multiple_eg_ = 0
def has_allow_multiple_eg(self): return self.has_allow_multiple_eg_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_app()): self.set_app(x.app())
if (x.has_allow_multiple_eg()): self.set_allow_multiple_eg(x.allow_multiple_eg())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
if self.has_allow_multiple_eg_ != x.has_allow_multiple_eg_: return 0
if self.has_allow_multiple_eg_ and self.allow_multiple_eg_ != x.allow_multiple_eg_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_app_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(len(self.app_))
if (self.has_allow_multiple_eg_): n += 2
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_app_):
n += 1
n += self.lengthString(len(self.app_))
if (self.has_allow_multiple_eg_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_app()
self.clear_allow_multiple_eg()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_allow_multiple_eg_):
out.putVarInt32(16)
out.putBoolean(self.allow_multiple_eg_)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_app_):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_allow_multiple_eg_):
out.putVarInt32(16)
out.putBoolean(self.allow_multiple_eg_)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app(d.getPrefixedString())
continue
if tt == 16:
self.set_allow_multiple_eg(d.getBoolean())
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
if self.has_allow_multiple_eg_: res+=prefix+("allow_multiple_eg: %s\n" % self.DebugFormatBool(self.allow_multiple_eg_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 3
kapp = 1
kallow_multiple_eg = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app",
2: "allow_multiple_eg",
3: "header",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.BeginTransactionRequest'
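# Usage sketch (illustrative only). BeginTransactionRequest opens a transaction
# for the required `app` id; `allow_multiple_eg` opts in to transactions that
# span multiple entity groups, going by the field name above:
#
#   req = BeginTransactionRequest()
#   req.set_app('my-app-id')            # hypothetical application id
#   req.set_allow_multiple_eg(True)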
class CommitResponse_Version(ProtocolBuffer.ProtocolMessage):
has_root_entity_key_ = 0
has_version_ = 0
version_ = 0
def __init__(self, contents=None):
self.root_entity_key_ = Reference()
if contents is not None: self.MergeFromString(contents)
def root_entity_key(self): return self.root_entity_key_
def mutable_root_entity_key(self): self.has_root_entity_key_ = 1; return self.root_entity_key_
def clear_root_entity_key(self):self.has_root_entity_key_ = 0; self.root_entity_key_.Clear()
def has_root_entity_key(self): return self.has_root_entity_key_
def version(self): return self.version_
def set_version(self, x):
self.has_version_ = 1
self.version_ = x
def clear_version(self):
if self.has_version_:
self.has_version_ = 0
self.version_ = 0
def has_version(self): return self.has_version_
def MergeFrom(self, x):
assert x is not self
if (x.has_root_entity_key()): self.mutable_root_entity_key().MergeFrom(x.root_entity_key())
if (x.has_version()): self.set_version(x.version())
def Equals(self, x):
if x is self: return 1
if self.has_root_entity_key_ != x.has_root_entity_key_: return 0
if self.has_root_entity_key_ and self.root_entity_key_ != x.root_entity_key_: return 0
if self.has_version_ != x.has_version_: return 0
if self.has_version_ and self.version_ != x.version_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_root_entity_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: root_entity_key not set.')
elif not self.root_entity_key_.IsInitialized(debug_strs): initialized = 0
if (not self.has_version_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: version not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(self.root_entity_key_.ByteSize())
n += self.lengthVarInt64(self.version_)
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_root_entity_key_):
n += 1
n += self.lengthString(self.root_entity_key_.ByteSizePartial())
if (self.has_version_):
n += 1
n += self.lengthVarInt64(self.version_)
return n
def Clear(self):
self.clear_root_entity_key()
self.clear_version()
def OutputUnchecked(self, out):
out.putVarInt32(34)
out.putVarInt32(self.root_entity_key_.ByteSize())
self.root_entity_key_.OutputUnchecked(out)
out.putVarInt32(40)
out.putVarInt64(self.version_)
def OutputPartial(self, out):
if (self.has_root_entity_key_):
out.putVarInt32(34)
out.putVarInt32(self.root_entity_key_.ByteSizePartial())
self.root_entity_key_.OutputPartial(out)
if (self.has_version_):
out.putVarInt32(40)
out.putVarInt64(self.version_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 28: break
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_root_entity_key().TryMerge(tmp)
continue
if tt == 40:
self.set_version(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_root_entity_key_:
res+=prefix+"root_entity_key <\n"
res+=self.root_entity_key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
return res
class CommitResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.version_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def mutable_version(self, i):
return self.version_[i]
def add_version(self):
x = CommitResponse_Version()
self.version_.append(x)
return x
def clear_version(self):
self.version_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
for i in xrange(x.version_size()): self.add_version().CopyFrom(x.version(i))
def Equals(self, x):
if x is self: return 1
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
for p in self.version_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
n += 2 * len(self.version_)
for i in xrange(len(self.version_)): n += self.version_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
n += 2 * len(self.version_)
for i in xrange(len(self.version_)): n += self.version_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_cost()
self.clear_version()
def OutputUnchecked(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
for i in xrange(len(self.version_)):
out.putVarInt32(27)
self.version_[i].OutputUnchecked(out)
out.putVarInt32(28)
def OutputPartial(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
for i in xrange(len(self.version_)):
out.putVarInt32(27)
self.version_[i].OutputPartial(out)
out.putVarInt32(28)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if tt == 27:
self.add_version().TryMerge(d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.version_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Version%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcost = 1
kVersionGroup = 3
kVersionroot_entity_key = 4
kVersionversion = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cost",
3: "Version",
4: "root_entity_key",
5: "version",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STARTGROUP,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CommitResponse'
if _extension_runtime:
pass
__all__ = ['InternalHeader','Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_PositionIndexValue','CompiledCursor_Position','Cursor','Error','Cost','Cost_CommitCost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','TouchRequest','TouchResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse','CommitResponse_Version']
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/google/appengine/datastore/datastore_v3_pb.py
|
Python
|
bsd-3-clause
| 282,355 |
// Copyright 2012 tsuru authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package provisiontest
import (
"fmt"
"io"
"io/ioutil"
"net/url"
"sort"
"sync"
"sync/atomic"
"time"
docker "github.com/fsouza/go-dockerclient"
"github.com/pkg/errors"
"github.com/tsuru/tsuru/action"
"github.com/tsuru/tsuru/app/bind"
"github.com/tsuru/tsuru/event"
"github.com/tsuru/tsuru/net"
"github.com/tsuru/tsuru/provision"
"github.com/tsuru/tsuru/provision/dockercommon"
"github.com/tsuru/tsuru/quota"
"github.com/tsuru/tsuru/router/routertest"
appTypes "github.com/tsuru/tsuru/types/app"
)
var (
ProvisionerInstance *FakeProvisioner
errNotProvisioned = &provision.Error{Reason: "App is not provisioned."}
uniqueIpCounter int32 = 0
_ provision.NodeProvisioner = &FakeProvisioner{}
_ provision.Provisioner = &FakeProvisioner{}
_ provision.App = &FakeApp{}
_ bind.App = &FakeApp{}
)
const fakeAppImage = "app-image"
func init() {
ProvisionerInstance = NewFakeProvisioner()
provision.Register("fake", func() (provision.Provisioner, error) {
return ProvisionerInstance, nil
})
}
// Fake implementation for provision.App.
type FakeApp struct {
name string
cname []string
IP string
platform string
units []provision.Unit
logs []string
logMut sync.Mutex
Commands []string
Memory int64
Swap int64
CpuShare int
commMut sync.Mutex
Deploys uint
env map[string]bind.EnvVar
bindCalls []*provision.Unit
bindLock sync.Mutex
serviceEnvs []bind.ServiceEnvVar
serviceLock sync.Mutex
Pool string
UpdatePlatform bool
TeamOwner string
Teams []string
quota.Quota
}
func NewFakeApp(name, platform string, units int) *FakeApp {
app := FakeApp{
name: name,
platform: platform,
units: make([]provision.Unit, units),
Quota: quota.Unlimited,
Pool: "test-default",
}
routertest.FakeRouter.AddBackend(&app)
namefmt := "%s-%d"
for i := 0; i < units; i++ {
val := atomic.AddInt32(&uniqueIpCounter, 1)
app.units[i] = provision.Unit{
ID: fmt.Sprintf(namefmt, name, i),
Status: provision.StatusStarted,
IP: fmt.Sprintf("10.10.10.%d", val),
Address: &url.URL{
Scheme: "http",
Host: fmt.Sprintf("10.10.10.%d:%d", val, val),
},
}
}
return &app
}
func (a *FakeApp) GetMemory() int64 {
return a.Memory
}
func (a *FakeApp) GetSwap() int64 {
return a.Swap
}
func (a *FakeApp) GetCpuShare() int {
return a.CpuShare
}
func (a *FakeApp) GetTeamsName() []string {
return a.Teams
}
func (a *FakeApp) HasBind(unit *provision.Unit) bool {
a.bindLock.Lock()
defer a.bindLock.Unlock()
for _, u := range a.bindCalls {
if u.ID == unit.ID {
return true
}
}
return false
}
func (a *FakeApp) BindUnit(unit *provision.Unit) error {
a.bindLock.Lock()
defer a.bindLock.Unlock()
a.bindCalls = append(a.bindCalls, unit)
return nil
}
func (a *FakeApp) UnbindUnit(unit *provision.Unit) error {
a.bindLock.Lock()
defer a.bindLock.Unlock()
index := -1
for i, u := range a.bindCalls {
if u.ID == unit.ID {
index = i
break
}
}
if index < 0 {
return errors.New("not bound")
}
length := len(a.bindCalls)
a.bindCalls[index] = a.bindCalls[length-1]
a.bindCalls = a.bindCalls[:length-1]
return nil
}
func (a *FakeApp) GetQuota() quota.Quota {
return a.Quota
}
func (a *FakeApp) SetQuotaInUse(inUse int) error {
if !a.Quota.Unlimited() && inUse > a.Quota.Limit {
return "a.QuotaExceededError{
Requested: uint(inUse),
Available: uint(a.Quota.Limit),
}
}
a.Quota.InUse = inUse
return nil
}
func (a *FakeApp) GetCname() []string {
return a.cname
}
func (a *FakeApp) GetServiceEnvs() []bind.ServiceEnvVar {
a.serviceLock.Lock()
defer a.serviceLock.Unlock()
return a.serviceEnvs
}
func (a *FakeApp) AddInstance(instanceArgs bind.AddInstanceArgs) error {
a.serviceLock.Lock()
defer a.serviceLock.Unlock()
a.serviceEnvs = append(a.serviceEnvs, instanceArgs.Envs...)
if instanceArgs.Writer != nil {
instanceArgs.Writer.Write([]byte("add instance"))
}
return nil
}
func (a *FakeApp) RemoveInstance(instanceArgs bind.RemoveInstanceArgs) error {
a.serviceLock.Lock()
defer a.serviceLock.Unlock()
lenBefore := len(a.serviceEnvs)
for i := 0; i < len(a.serviceEnvs); i++ {
se := a.serviceEnvs[i]
if se.ServiceName == instanceArgs.ServiceName && se.InstanceName == instanceArgs.InstanceName {
a.serviceEnvs = append(a.serviceEnvs[:i], a.serviceEnvs[i+1:]...)
i--
}
}
if len(a.serviceEnvs) == lenBefore {
return errors.New("instance not found")
}
if instanceArgs.Writer != nil {
instanceArgs.Writer.Write([]byte("remove instance"))
}
return nil
}
func (a *FakeApp) Logs() []string {
a.logMut.Lock()
defer a.logMut.Unlock()
logs := make([]string, len(a.logs))
copy(logs, a.logs)
return logs
}
func (a *FakeApp) HasLog(source, unit, message string) bool {
log := source + unit + message
a.logMut.Lock()
defer a.logMut.Unlock()
for _, l := range a.logs {
if l == log {
return true
}
}
return false
}
func (a *FakeApp) GetCommands() []string {
a.commMut.Lock()
defer a.commMut.Unlock()
return a.Commands
}
func (a *FakeApp) Log(message, source, unit string) error {
a.logMut.Lock()
a.logs = append(a.logs, source+unit+message)
a.logMut.Unlock()
return nil
}
func (a *FakeApp) GetName() string {
return a.name
}
func (a *FakeApp) GetPool() string {
return a.Pool
}
func (a *FakeApp) GetPlatform() string {
return a.platform
}
func (a *FakeApp) GetDeploys() uint {
return a.Deploys
}
func (a *FakeApp) GetTeamOwner() string {
return a.TeamOwner
}
func (a *FakeApp) Units() ([]provision.Unit, error) {
return a.units, nil
}
func (a *FakeApp) AddUnit(u provision.Unit) {
a.units = append(a.units, u)
}
func (a *FakeApp) SetEnv(env bind.EnvVar) {
if a.env == nil {
a.env = map[string]bind.EnvVar{}
}
a.env[env.Name] = env
}
func (a *FakeApp) SetEnvs(setEnvs bind.SetEnvArgs) error {
for _, env := range setEnvs.Envs {
a.SetEnv(env)
}
return nil
}
func (a *FakeApp) UnsetEnvs(unsetEnvs bind.UnsetEnvArgs) error {
for _, env := range unsetEnvs.VariableNames {
delete(a.env, env)
}
return nil
}
func (a *FakeApp) GetLock() provision.AppLock {
return nil
}
func (a *FakeApp) GetUnits() ([]bind.Unit, error) {
units := make([]bind.Unit, len(a.units))
for i := range a.units {
units[i] = &a.units[i]
}
return units, nil
}
func (a *FakeApp) Envs() map[string]bind.EnvVar {
return a.env
}
func (a *FakeApp) Run(cmd string, w io.Writer, args provision.RunArgs) error {
a.commMut.Lock()
a.Commands = append(a.Commands, fmt.Sprintf("ran %s", cmd))
a.commMut.Unlock()
return nil
}
func (a *FakeApp) GetUpdatePlatform() bool {
return a.UpdatePlatform
}
func (app *FakeApp) GetRouters() []appTypes.AppRouter {
return []appTypes.AppRouter{{Name: "fake"}}
}
func (app *FakeApp) GetAddresses() ([]string, error) {
addr, err := routertest.FakeRouter.Addr(app.GetName())
if err != nil {
return nil, err
}
return []string{addr}, nil
}
type Cmd struct {
Cmd string
Args []string
App provision.App
}
type failure struct {
method string
err error
}
// Fake implementation for provision.Provisioner.
type FakeProvisioner struct {
Name string
cmds []Cmd
cmdMut sync.Mutex
outputs chan []byte
failures chan failure
apps map[string]provisionedApp
mut sync.RWMutex
shells map[string][]provision.ShellOptions
shellMut sync.Mutex
nodes map[string]FakeNode
nodeContainers map[string]int
}
func NewFakeProvisioner() *FakeProvisioner {
p := FakeProvisioner{Name: "fake"}
p.outputs = make(chan []byte, 8)
p.failures = make(chan failure, 8)
p.apps = make(map[string]provisionedApp)
p.shells = make(map[string][]provision.ShellOptions)
p.nodes = make(map[string]FakeNode)
p.nodeContainers = make(map[string]int)
return &p
}
func (p *FakeProvisioner) getError(method string) error {
select {
case fail := <-p.failures:
if fail.method == method {
return fail.err
}
p.failures <- fail
default:
}
return nil
}
type FakeNode struct {
ID string
Addr string
PoolName string
Meta map[string]string
status string
p *FakeProvisioner
failures int
hasSuccess bool
}
func (n *FakeNode) IaaSID() string {
return n.ID
}
func (n *FakeNode) Pool() string {
return n.PoolName
}
func (n *FakeNode) Address() string {
return n.Addr
}
func (n *FakeNode) Metadata() map[string]string {
return n.Meta
}
func (n *FakeNode) MetadataNoPrefix() map[string]string {
return n.Meta
}
func (n *FakeNode) Units() ([]provision.Unit, error) {
n.p.mut.Lock()
defer n.p.mut.Unlock()
return n.unitsLocked()
}
func (n *FakeNode) unitsLocked() ([]provision.Unit, error) {
var units []provision.Unit
for _, a := range n.p.apps {
for _, u := range a.units {
if net.URLToHost(u.Address.String()) == net.URLToHost(n.Addr) {
units = append(units, u)
}
}
}
return units, nil
}
func (n *FakeNode) Status() string {
return n.status
}
func (n *FakeNode) FailureCount() int {
return n.failures
}
func (n *FakeNode) HasSuccess() bool {
return n.hasSuccess
}
func (n *FakeNode) ResetFailures() {
n.failures = 0
}
func (n *FakeNode) Provisioner() provision.NodeProvisioner {
return n.p
}
func (n *FakeNode) SetHealth(failures int, hasSuccess bool) {
n.failures = failures
n.hasSuccess = hasSuccess
}
func (p *FakeProvisioner) AddNode(opts provision.AddNodeOptions) error {
p.mut.Lock()
defer p.mut.Unlock()
if err := p.getError("AddNode"); err != nil {
return err
}
if err := p.getError("AddNode:" + opts.Address); err != nil {
return err
}
metadata := opts.Metadata
if metadata == nil {
metadata = map[string]string{}
}
if _, ok := p.nodes[opts.Address]; ok {
return errors.New("fake node already exists")
}
p.nodes[opts.Address] = FakeNode{
ID: opts.IaaSID,
Addr: opts.Address,
PoolName: opts.Pool,
Meta: metadata,
p: p,
status: "enabled",
}
return nil
}
func (p *FakeProvisioner) GetNode(address string) (provision.Node, error) {
p.mut.RLock()
defer p.mut.RUnlock()
if err := p.getError("GetNode"); err != nil {
return nil, err
}
if n, ok := p.nodes[address]; ok {
return &n, nil
}
return nil, provision.ErrNodeNotFound
}
func (p *FakeProvisioner) RemoveNode(opts provision.RemoveNodeOptions) error {
p.mut.Lock()
defer p.mut.Unlock()
if err := p.getError("RemoveNode"); err != nil {
return err
}
_, ok := p.nodes[opts.Address]
if !ok {
return provision.ErrNodeNotFound
}
delete(p.nodes, opts.Address)
if opts.Writer != nil {
if opts.Rebalance {
opts.Writer.Write([]byte("rebalancing..."))
p.rebalanceNodesLocked(provision.RebalanceNodesOptions{
Force: true,
})
}
opts.Writer.Write([]byte("remove done!"))
}
return nil
}
func (p *FakeProvisioner) UpdateNode(opts provision.UpdateNodeOptions) error {
p.mut.Lock()
defer p.mut.Unlock()
if err := p.getError("UpdateNode"); err != nil {
return err
}
n, ok := p.nodes[opts.Address]
if !ok {
return provision.ErrNodeNotFound
}
if opts.Pool != "" {
n.PoolName = opts.Pool
}
if opts.Metadata != nil {
n.Meta = opts.Metadata
}
if opts.Enable {
n.status = "enabled"
}
if opts.Disable {
n.status = "disabled"
}
p.nodes[opts.Address] = n
return nil
}
type nodeList []provision.Node
func (l nodeList) Len() int { return len(l) }
func (l nodeList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l nodeList) Less(i, j int) bool { return l[i].Address() < l[j].Address() }
func (p *FakeProvisioner) ListNodes(addressFilter []string) ([]provision.Node, error) {
p.mut.RLock()
defer p.mut.RUnlock()
if err := p.getError("ListNodes"); err != nil {
return nil, err
}
var result []provision.Node
if addressFilter != nil {
result = make([]provision.Node, 0, len(addressFilter))
for _, a := range addressFilter {
n := p.nodes[a]
result = append(result, &n)
}
} else {
result = make([]provision.Node, 0, len(p.nodes))
for a := range p.nodes {
n := p.nodes[a]
result = append(result, &n)
}
}
sort.Sort(nodeList(result))
return result, nil
}
func (p *FakeProvisioner) NodeForNodeData(nodeData provision.NodeStatusData) (provision.Node, error) {
return provision.FindNodeByAddrs(p, nodeData.Addrs)
}
func (p *FakeProvisioner) RebalanceNodes(opts provision.RebalanceNodesOptions) (bool, error) {
p.mut.Lock()
defer p.mut.Unlock()
return p.rebalanceNodesLocked(opts)
}
func (p *FakeProvisioner) rebalanceNodesLocked(opts provision.RebalanceNodesOptions) (bool, error) {
if err := p.getError("RebalanceNodes"); err != nil {
return true, err
}
var w io.Writer
if opts.Event == nil {
w = ioutil.Discard
} else {
w = opts.Event
}
fmt.Fprintf(w, "rebalancing - dry: %v, force: %v\n", opts.Dry, opts.Force)
if len(opts.AppFilter) != 0 {
fmt.Fprintf(w, "filtering apps: %v\n", opts.AppFilter)
}
if len(opts.MetadataFilter) != 0 {
fmt.Fprintf(w, "filtering metadata: %v\n", opts.MetadataFilter)
}
if opts.Pool != "" {
fmt.Fprintf(w, "filtering pool: %v\n", opts.Pool)
}
if len(p.nodes) == 0 || opts.Dry {
return true, nil
}
max := 0
min := -1
var nodes []FakeNode
for _, n := range p.nodes {
nodes = append(nodes, n)
units, err := n.unitsLocked()
if err != nil {
return true, err
}
unitCount := len(units)
if unitCount > max {
max = unitCount
}
if min == -1 || unitCount < min {
min = unitCount
}
}
if max-min < 2 && !opts.Force {
return false, nil
}
gi := 0
for _, a := range p.apps {
nodeIdx := 0
for i := range a.units {
u := &a.units[i]
firstIdx := nodeIdx
var hostAddr string
for {
idx := nodeIdx
nodeIdx = (nodeIdx + 1) % len(nodes)
if nodes[idx].Pool() == a.app.GetPool() {
hostAddr = net.URLToHost(nodes[idx].Address())
break
}
if nodeIdx == firstIdx {
return true, errors.Errorf("unable to find node for pool %s", a.app.GetPool())
}
}
u.IP = hostAddr
u.Address = &url.URL{
Scheme: "http",
Host: fmt.Sprintf("%s:%d", hostAddr, gi),
}
gi++
}
}
return true, nil
}
// Restarts returns the number of restarts for a given app.
func (p *FakeProvisioner) Restarts(a provision.App, process string) int {
p.mut.RLock()
defer p.mut.RUnlock()
return p.apps[a.GetName()].restarts[process]
}
// Starts returns the number of starts for a given app.
func (p *FakeProvisioner) Starts(app provision.App, process string) int {
p.mut.RLock()
defer p.mut.RUnlock()
return p.apps[app.GetName()].starts[process]
}
// Stops returns the number of stops for a given app.
func (p *FakeProvisioner) Stops(app provision.App, process string) int {
p.mut.RLock()
defer p.mut.RUnlock()
return p.apps[app.GetName()].stops[process]
}
// Sleeps returns the number of sleeps for a given app.
func (p *FakeProvisioner) Sleeps(app provision.App, process string) int {
p.mut.RLock()
defer p.mut.RUnlock()
return p.apps[app.GetName()].sleeps[process]
}
func (p *FakeProvisioner) CustomData(app provision.App) map[string]interface{} {
p.mut.RLock()
defer p.mut.RUnlock()
return p.apps[app.GetName()].lastData
}
// Shells return all shell calls to the given unit.
func (p *FakeProvisioner) Shells(unit string) []provision.ShellOptions {
p.shellMut.Lock()
defer p.shellMut.Unlock()
return p.shells[unit]
}
// GetCmds returns the list of commands executed in an app. If the command is
// an empty string, it returns all commands executed in the given app.
func (p *FakeProvisioner) GetCmds(cmd string, app provision.App) []Cmd {
var cmds []Cmd
p.cmdMut.Lock()
for _, c := range p.cmds {
if (cmd == "" || c.Cmd == cmd) && app.GetName() == c.App.GetName() {
cmds = append(cmds, c)
}
}
p.cmdMut.Unlock()
return cmds
}
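// Illustrative sketch (not part of the original API docs): how a test might
// inspect the commands recorded by the fake. The app and command names below
// are hypothetical.
//
//	p := NewFakeProvisioner()
//	app := NewFakeApp("myapp", "python", 1)
//	// ... provision the app and run "ls" through ExecuteCommand ...
//	all := p.GetCmds("", app)  // every command recorded for myapp
//	ls := p.GetCmds("ls", app) // only the "ls" invocations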
// Provisioned checks whether the given app has been provisioned.
func (p *FakeProvisioner) Provisioned(app provision.App) bool {
p.mut.RLock()
defer p.mut.RUnlock()
_, ok := p.apps[app.GetName()]
return ok
}
func (p *FakeProvisioner) GetUnits(app provision.App) []provision.Unit {
p.mut.RLock()
pApp := p.apps[app.GetName()]
p.mut.RUnlock()
return pApp.units
}
// GetAppFromUnitID returns an app from unitID
func (p *FakeProvisioner) GetAppFromUnitID(unitID string) (provision.App, error) {
p.mut.RLock()
defer p.mut.RUnlock()
for _, a := range p.apps {
for _, u := range a.units {
if u.GetID() == unitID {
return a.app, nil
}
}
}
return nil, errors.New("app not found")
}
// PrepareOutput sends the given slice of bytes to a queue of outputs.
//
// Each prepared output will be used in the ExecuteCommand. It might be sent to
// the standard output or standard error. See ExecuteCommand docs for more
// details.
func (p *FakeProvisioner) PrepareOutput(b []byte) {
p.outputs <- b
}
// PrepareFailure prepares a failure for the given method name.
//
// For instance, PrepareFailure("Deploy", errors.New("Deploy failed")) will
// cause the next Deploy call to return the given error. Multiple calls to this
// method enqueue failures, i.e. three calls to PrepareFailure("Deploy", ...)
// mean that the next three Deploy calls will fail.
func (p *FakeProvisioner) PrepareFailure(method string, err error) {
p.failures <- failure{method, err}
}
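// Illustrative sketch (assumption, not from the original sources): preparing
// canned output and a failure before exercising the fake in a test. The method
// name and error value here are hypothetical.
//
//	p := NewFakeProvisioner()
//	p.PrepareOutput([]byte("hello"))                          // consumed by the next ExecuteCommand
//	p.PrepareFailure("Restart", errors.New("restart failed")) // the next Restart call returns this error
//	err := p.Restart(app, "web", nil)                         // err is "restart failed"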
// Reset cleans up the FakeProvisioner, deleting all apps and their data. It
// also deletes prepared failures and output. It's like calling
// NewFakeProvisioner again, without all the allocations.
func (p *FakeProvisioner) Reset() {
p.cmdMut.Lock()
p.cmds = nil
p.cmdMut.Unlock()
p.mut.Lock()
p.apps = make(map[string]provisionedApp)
p.mut.Unlock()
p.shellMut.Lock()
p.shells = make(map[string][]provision.ShellOptions)
p.shellMut.Unlock()
p.mut.Lock()
p.nodes = make(map[string]FakeNode)
p.mut.Unlock()
uniqueIpCounter = 0
p.nodeContainers = make(map[string]int)
for {
select {
case <-p.outputs:
case <-p.failures:
default:
return
}
}
}
func (p *FakeProvisioner) Swap(app1, app2 provision.App, cnameOnly bool) error {
return routertest.FakeRouter.Swap(app1.GetName(), app2.GetName(), cnameOnly)
}
func (p *FakeProvisioner) Deploy(app provision.App, img string, evt *event.Event) (string, error) {
if err := p.getError("Deploy"); err != nil {
return "", err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return "", errNotProvisioned
}
pApp.image = img
evt.Write([]byte("Builder deploy called"))
p.apps[app.GetName()] = pApp
return fakeAppImage, nil
}
func (p *FakeProvisioner) GetClient(app provision.App) (provision.BuilderDockerClient, error) {
for _, node := range p.nodes {
client, err := docker.NewClient(node.Addr)
if err != nil {
return nil, err
}
return &dockercommon.PullAndCreateClient{Client: client}, nil
}
return nil, errors.New("No node found")
}
func (p *FakeProvisioner) CleanImage(appName, imgName string) error {
for _, node := range p.nodes {
c, err := docker.NewClient(node.Addr)
if err != nil {
return err
}
err = c.RemoveImage(imgName)
if err != nil && err != docker.ErrNoSuchImage {
return err
}
}
return nil
}
func (p *FakeProvisioner) ArchiveDeploy(app provision.App, archiveURL string, evt *event.Event) (string, error) {
if err := p.getError("ArchiveDeploy"); err != nil {
return "", err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return "", errNotProvisioned
}
evt.Write([]byte("Archive deploy called"))
pApp.lastArchive = archiveURL
p.apps[app.GetName()] = pApp
return fakeAppImage, nil
}
func (p *FakeProvisioner) UploadDeploy(app provision.App, file io.ReadCloser, fileSize int64, build bool, evt *event.Event) (string, error) {
if err := p.getError("UploadDeploy"); err != nil {
return "", err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return "", errNotProvisioned
}
evt.Write([]byte("Upload deploy called"))
pApp.lastFile = file
p.apps[app.GetName()] = pApp
return fakeAppImage, nil
}
func (p *FakeProvisioner) ImageDeploy(app provision.App, img string, evt *event.Event) (string, error) {
if err := p.getError("ImageDeploy"); err != nil {
return "", err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return "", errNotProvisioned
}
pApp.image = img
evt.Write([]byte("Image deploy called"))
p.apps[app.GetName()] = pApp
return img, nil
}
func (p *FakeProvisioner) Rollback(app provision.App, img string, evt *event.Event) (string, error) {
if err := p.getError("Rollback"); err != nil {
return "", err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return "", errNotProvisioned
}
evt.Write([]byte("Rollback deploy called"))
p.apps[app.GetName()] = pApp
return img, nil
}
func (p *FakeProvisioner) Rebuild(app provision.App, evt *event.Event) (string, error) {
if err := p.getError("Rebuild"); err != nil {
return "", err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return "", errNotProvisioned
}
evt.Write([]byte("Rebuild deploy called"))
p.apps[app.GetName()] = pApp
return fakeAppImage, nil
}
func (p *FakeProvisioner) Provision(app provision.App) error {
if err := p.getError("Provision"); err != nil {
return err
}
if p.Provisioned(app) {
return &provision.Error{Reason: "App already provisioned."}
}
p.mut.Lock()
defer p.mut.Unlock()
p.apps[app.GetName()] = provisionedApp{
app: app,
restarts: make(map[string]int),
starts: make(map[string]int),
stops: make(map[string]int),
sleeps: make(map[string]int),
}
return nil
}
func (p *FakeProvisioner) Restart(app provision.App, process string, w io.Writer) error {
if err := p.getError("Restart"); err != nil {
return err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return errNotProvisioned
}
pApp.restarts[process]++
p.apps[app.GetName()] = pApp
if w != nil {
fmt.Fprintf(w, "restarting app")
}
return nil
}
func (p *FakeProvisioner) Start(app provision.App, process string) error {
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return errNotProvisioned
}
pApp.starts[process]++
p.apps[app.GetName()] = pApp
return nil
}
func (p *FakeProvisioner) Destroy(app provision.App) error {
if err := p.getError("Destroy"); err != nil {
return err
}
if !p.Provisioned(app) {
return errNotProvisioned
}
p.mut.Lock()
defer p.mut.Unlock()
delete(p.apps, app.GetName())
return nil
}
func (p *FakeProvisioner) AddUnits(app provision.App, n uint, process string, w io.Writer) error {
_, err := p.AddUnitsToNode(app, n, process, w, "")
return err
}
func (p *FakeProvisioner) AddUnitsToNode(app provision.App, n uint, process string, w io.Writer, nodeAddr string) ([]provision.Unit, error) {
if err := p.getError("AddUnits"); err != nil {
return nil, err
}
if n == 0 {
return nil, errors.New("Cannot add 0 units.")
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return nil, errNotProvisioned
}
name := app.GetName()
platform := app.GetPlatform()
length := uint(len(pApp.units))
var addresses []*url.URL
for i := uint(0); i < n; i++ {
val := atomic.AddInt32(&uniqueIpCounter, 1)
var hostAddr string
if nodeAddr != "" {
hostAddr = net.URLToHost(nodeAddr)
} else if len(p.nodes) > 0 {
for _, n := range p.nodes {
hostAddr = net.URLToHost(n.Address())
break
}
} else {
hostAddr = fmt.Sprintf("10.10.10.%d", val)
}
unit := provision.Unit{
ID: fmt.Sprintf("%s-%d", name, pApp.unitLen),
AppName: name,
Type: platform,
Status: provision.StatusStarted,
IP: hostAddr,
ProcessName: process,
Address: &url.URL{
Scheme: "http",
Host: fmt.Sprintf("%s:%d", hostAddr, val),
},
}
addresses = append(addresses, unit.Address)
pApp.units = append(pApp.units, unit)
pApp.unitLen++
}
err := routertest.FakeRouter.AddRoutes(name, addresses)
if err != nil {
return nil, err
}
result := make([]provision.Unit, int(n))
copy(result, pApp.units[length:])
p.apps[app.GetName()] = pApp
if w != nil {
fmt.Fprintf(w, "added %d units", n)
}
return result, nil
}
func (p *FakeProvisioner) RemoveUnits(app provision.App, n uint, process string, w io.Writer) error {
if err := p.getError("RemoveUnits"); err != nil {
return err
}
if n == 0 {
return errors.New("cannot remove 0 units")
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return errNotProvisioned
}
var newUnits []provision.Unit
removedCount := n
var addresses []*url.URL
for _, u := range pApp.units {
if removedCount > 0 && u.ProcessName == process {
removedCount--
addresses = append(addresses, u.Address)
continue
}
newUnits = append(newUnits, u)
}
err := routertest.FakeRouter.RemoveRoutes(app.GetName(), addresses)
if err != nil {
return err
}
if removedCount > 0 {
return errors.New("too many units to remove")
}
if w != nil {
fmt.Fprintf(w, "removing %d units", n)
}
pApp.units = newUnits
pApp.unitLen = len(newUnits)
p.apps[app.GetName()] = pApp
return nil
}
// ExecuteCommand will pretend to execute the given command, recording data
// about it.
//
// The output of the command must be prepared with PrepareOutput, and failures
// must be prepared with PrepareFailure. In case of failure, the prepared
// output is sent to the standard error stream; otherwise, it is sent to the
// standard output stream.
//
// When there is no output nor failure prepared, ExecuteCommand will return a
// timeout error.
func (p *FakeProvisioner) ExecuteCommand(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error {
var (
output []byte
err error
)
command := Cmd{
Cmd: cmd,
Args: args,
App: app,
}
p.cmdMut.Lock()
p.cmds = append(p.cmds, command)
p.cmdMut.Unlock()
units, err := p.Units(app)
if err != nil {
return err
}
for range units {
select {
case output = <-p.outputs:
select {
case fail := <-p.failures:
if fail.method == "ExecuteCommand" {
stderr.Write(output)
return fail.err
}
p.failures <- fail
default:
stdout.Write(output)
}
case fail := <-p.failures:
if fail.method == "ExecuteCommand" {
err = fail.err
select {
case output = <-p.outputs:
stderr.Write(output)
default:
}
} else {
p.failures <- fail
}
case <-time.After(2e9):
return errors.New("FakeProvisioner timed out waiting for output.")
}
}
return err
}
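// Illustrative sketch (assumption): pairing PrepareOutput with ExecuteCommand
// in a test. One prepared output is consumed per unit of the app; the buffers
// below are hypothetical.
//
//	var stdout, stderr bytes.Buffer
//	p.PrepareOutput([]byte("done"))
//	err := p.ExecuteCommand(&stdout, &stderr, app, "ls", "-l")
//	// stdout now contains "done"; with no prepared output or failure the
//	// call would time out after two seconds.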
func (p *FakeProvisioner) ExecuteCommandOnce(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error {
var output []byte
command := Cmd{
Cmd: cmd,
Args: args,
App: app,
}
p.cmdMut.Lock()
p.cmds = append(p.cmds, command)
p.cmdMut.Unlock()
select {
case output = <-p.outputs:
stdout.Write(output)
case fail := <-p.failures:
if fail.method == "ExecuteCommandOnce" {
select {
case output = <-p.outputs:
stderr.Write(output)
default:
}
return fail.err
} else {
p.failures <- fail
}
case <-time.After(2e9):
return errors.New("FakeProvisioner timed out waiting for output.")
}
return nil
}
func (p *FakeProvisioner) ExecuteCommandIsolated(stdout, stderr io.Writer, app provision.App, cmd string, args ...string) error {
var output []byte
command := Cmd{
Cmd: cmd,
Args: args,
App: app,
}
p.cmdMut.Lock()
p.cmds = append(p.cmds, command)
p.cmdMut.Unlock()
select {
case output = <-p.outputs:
stdout.Write(output)
case fail := <-p.failures:
if fail.method == "ExecuteCommandIsolated" {
select {
case output = <-p.outputs:
stderr.Write(output)
default:
}
return fail.err
} else {
p.failures <- fail
}
case <-time.After(2e9):
return errors.New("FakeProvisioner timed out waiting for output.")
}
return nil
}
func (p *FakeProvisioner) AddUnit(app provision.App, unit provision.Unit) {
p.mut.Lock()
defer p.mut.Unlock()
a := p.apps[app.GetName()]
a.units = append(a.units, unit)
a.unitLen++
p.apps[app.GetName()] = a
}
func (p *FakeProvisioner) Units(apps ...provision.App) ([]provision.Unit, error) {
if err := p.getError("Units"); err != nil {
return nil, err
}
p.mut.Lock()
defer p.mut.Unlock()
var allUnits []provision.Unit
for _, a := range apps {
allUnits = append(allUnits, p.apps[a.GetName()].units...)
}
return allUnits, nil
}
func (p *FakeProvisioner) RoutableAddresses(app provision.App) ([]url.URL, error) {
p.mut.Lock()
defer p.mut.Unlock()
units := p.apps[app.GetName()].units
addrs := make([]url.URL, len(units))
for i := range units {
addrs[i] = *units[i].Address
}
return addrs, nil
}
func (p *FakeProvisioner) SetUnitStatus(unit provision.Unit, status provision.Status) error {
p.mut.Lock()
defer p.mut.Unlock()
var units []provision.Unit
if unit.AppName == "" {
units = p.getAllUnits()
} else {
app, ok := p.apps[unit.AppName]
if !ok {
return errNotProvisioned
}
units = app.units
}
index := -1
for i, unt := range units {
if unt.ID == unit.ID {
index = i
unit.AppName = unt.AppName
break
}
}
if index < 0 {
return &provision.UnitNotFoundError{ID: unit.ID}
}
app := p.apps[unit.AppName]
app.units[index].Status = status
p.apps[unit.AppName] = app
return nil
}
func (p *FakeProvisioner) getAllUnits() []provision.Unit {
var units []provision.Unit
for _, app := range p.apps {
units = append(units, app.units...)
}
return units
}
func (p *FakeProvisioner) Addr(app provision.App) (string, error) {
if err := p.getError("Addr"); err != nil {
return "", err
}
return routertest.FakeRouter.Addr(app.GetName())
}
func (p *FakeProvisioner) SetCName(app provision.App, cname string) error {
if err := p.getError("SetCName"); err != nil {
return err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return errNotProvisioned
}
pApp.cnames = append(pApp.cnames, cname)
p.apps[app.GetName()] = pApp
return routertest.FakeRouter.SetCName(cname, app.GetName())
}
func (p *FakeProvisioner) UnsetCName(app provision.App, cname string) error {
if err := p.getError("UnsetCName"); err != nil {
return err
}
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return errNotProvisioned
}
pApp.cnames = []string{}
p.apps[app.GetName()] = pApp
return routertest.FakeRouter.UnsetCName(cname, app.GetName())
}
func (p *FakeProvisioner) HasCName(app provision.App, cname string) bool {
p.mut.RLock()
pApp, ok := p.apps[app.GetName()]
p.mut.RUnlock()
for _, cnameApp := range pApp.cnames {
if cnameApp == cname {
return ok && true
}
}
return false
}
func (p *FakeProvisioner) Stop(app provision.App, process string) error {
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return errNotProvisioned
}
pApp.stops[process]++
for i, u := range pApp.units {
u.Status = provision.StatusStopped
pApp.units[i] = u
}
p.apps[app.GetName()] = pApp
return nil
}
func (p *FakeProvisioner) Sleep(app provision.App, process string) error {
p.mut.Lock()
defer p.mut.Unlock()
pApp, ok := p.apps[app.GetName()]
if !ok {
return errNotProvisioned
}
pApp.sleeps[process]++
for i, u := range pApp.units {
u.Status = provision.StatusAsleep
pApp.units[i] = u
}
p.apps[app.GetName()] = pApp
return nil
}
func (p *FakeProvisioner) RegisterUnit(a provision.App, unitId string, customData map[string]interface{}) error {
p.mut.Lock()
defer p.mut.Unlock()
pa, ok := p.apps[a.GetName()]
if !ok {
return errors.New("app not found")
}
pa.lastData = customData
for i, u := range pa.units {
if u.ID == unitId {
u.IP = u.IP + "-updated"
pa.units[i] = u
p.apps[a.GetName()] = pa
return nil
}
}
return &provision.UnitNotFoundError{ID: unitId}
}
func (p *FakeProvisioner) Shell(opts provision.ShellOptions) error {
var unit provision.Unit
units, err := p.Units(opts.App)
if err != nil {
return err
}
if len(units) == 0 {
return errors.New("app has no units")
} else if opts.Unit != "" {
for _, u := range units {
if u.ID == opts.Unit {
unit = u
break
}
}
} else {
unit = units[0]
}
if unit.ID == "" {
return errors.New("unit not found")
}
p.shellMut.Lock()
defer p.shellMut.Unlock()
p.shells[unit.ID] = append(p.shells[unit.ID], opts)
return nil
}
func (p *FakeProvisioner) FilterAppsByUnitStatus(apps []provision.App, status []string) ([]provision.App, error) {
filteredApps := []provision.App{}
for i := range apps {
units, _ := p.Units(apps[i])
for _, u := range units {
if stringInArray(u.Status.String(), status) {
filteredApps = append(filteredApps, apps[i])
break
}
}
}
return filteredApps, nil
}
func (p *FakeProvisioner) GetName() string {
return p.Name
}
func (p *FakeProvisioner) UpgradeNodeContainer(name string, pool string, writer io.Writer) error {
p.nodeContainers[name+"-"+pool]++
return nil
}
func (p *FakeProvisioner) RemoveNodeContainer(name string, pool string, writer io.Writer) error {
p.nodeContainers[name+"-"+pool] = 0
return nil
}
func (p *FakeProvisioner) HasNodeContainer(name string, pool string) bool {
return p.nodeContainers[name+"-"+pool] > 0
}
func stringInArray(value string, array []string) bool {
for _, str := range array {
if str == value {
return true
}
}
return false
}
type PipelineFakeProvisioner struct {
*FakeProvisioner
executedPipeline bool
}
func (p *PipelineFakeProvisioner) ExecutedPipeline() bool {
return p.executedPipeline
}
func (p *PipelineFakeProvisioner) DeployPipeline() *action.Pipeline {
act := action.Action{
Name: "change-executed-pipeline",
Forward: func(ctx action.FWContext) (action.Result, error) {
p.executedPipeline = true
return nil, nil
},
Backward: func(ctx action.BWContext) {
},
}
actions := []*action.Action{&act}
pipeline := action.NewPipeline(actions...)
return pipeline
}
type PipelineErrorFakeProvisioner struct {
*FakeProvisioner
}
func (p *PipelineErrorFakeProvisioner) DeployPipeline() *action.Pipeline {
act := action.Action{
Name: "error-pipeline",
Forward: func(ctx action.FWContext) (action.Result, error) {
return nil, errors.New("deploy error")
},
Backward: func(ctx action.BWContext) {
},
}
actions := []*action.Action{&act}
pipeline := action.NewPipeline(actions...)
return pipeline
}
type provisionedApp struct {
units []provision.Unit
app provision.App
restarts map[string]int
starts map[string]int
stops map[string]int
sleeps map[string]int
lastArchive string
lastFile io.ReadCloser
cnames []string
unitLen int
lastData map[string]interface{}
image string
}
|
ggarnier/tsuru
|
provision/provisiontest/fake_provisioner.go
|
Go
|
bsd-3-clause
| 35,129 |
/******************************************************************************
*
* Copyright(c) 2009-2012 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110, USA
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <[email protected]>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* Larry Finger <[email protected]>
*
*****************************************************************************/
#ifndef __RTL_REGD_H__
#define __RTL_REGD_H__
struct country_code_to_enum_rd {
u16 countrycode;
const char *iso_name;
};
enum country_code_type_t {
COUNTRY_CODE_FCC = 0,
COUNTRY_CODE_IC = 1,
COUNTRY_CODE_ETSI = 2,
COUNTRY_CODE_SPAIN = 3,
COUNTRY_CODE_FRANCE = 4,
COUNTRY_CODE_MKK = 5,
COUNTRY_CODE_MKK1 = 6,
COUNTRY_CODE_ISRAEL = 7,
COUNTRY_CODE_TELEC = 8,
COUNTRY_CODE_MIC = 9,
COUNTRY_CODE_GLOBAL_DOMAIN = 10,
COUNTRY_CODE_WORLD_WIDE_13 = 11,
COUNTRY_CODE_TELEC_NETGEAR = 12,
/*add new channel plan above this line */
COUNTRY_CODE_MAX
};
int rtl_regd_init(struct ieee80211_hw *hw,
int (*reg_notifier) (struct wiphy *wiphy,
struct regulatory_request *request));
int rtl_reg_notifier(struct wiphy *wiphy, struct regulatory_request *request);
#endif
|
talnoah/android_kernel_htc_dlx
|
virt/drivers/net/wireless/rtlwifi/regd.h
|
C
|
gpl-2.0
| 1,963 |
var request = require('request'),
log = require('bole')('npme-send-data'),
config = require('../../../config')
module.exports = function (formGuid, data, callback) {
var hubspot = config.license.hubspot.forms
.replace(":portal_id", config.license.hubspot.portal_id)
.replace(":form_guid", formGuid);
  request.post(hubspot, function (er, resp) {
    // bail out early on transport errors; `resp` is undefined in that case
    if (er) {
      return callback(er);
    }
    // 204 (accepted, no content) and 302 (redirect) both count as success
    if (resp.statusCode === 204 || resp.statusCode === 302) {
      return callback(null);
    }
    log.error('unexpected status code from hubspot; status=' + resp.statusCode + '; data=', data);
    callback(new Error('unexpected status code: ' + resp.statusCode));
  }).form(data);
}
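// Rough usage sketch (assumption, not from this repo's docs): the form GUID
// and field names below are placeholders for whatever HubSpot form is
// actually configured.
//
//   var sendData = require('./sendData');
//   sendData('hypothetical-form-guid', { email: '[email protected]' }, function (err) {
//     if (err) { console.error('hubspot rejected the submission', err); }
//   });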
|
alexindigo/newww
|
services/npme/methods/sendData.js
|
JavaScript
|
isc
| 693 |
module.exports = {
env: {
mocha: true
},
plugins: [
'mocha'
]
};
|
pghalliday/recursive-semver
|
test/.eslintrc.js
|
JavaScript
|
isc
| 81 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Converting 'MC' programs to 'MCMem'.
module Futhark.Pass.ExplicitAllocations.MC (explicitAllocations) where
import Futhark.IR.MC
import Futhark.IR.MCMem
import Futhark.Pass.ExplicitAllocations
import Futhark.Pass.ExplicitAllocations.SegOp
instance SizeSubst (MCOp rep op) where
opSizeSubst _ _ = mempty
handleSegOp :: SegOp () MC -> AllocM MC MCMem (SegOp () MCMem)
handleSegOp op = do
let num_threads = intConst Int64 256 -- FIXME
mapSegOpM (mapper num_threads) op
where
scope = scopeOfSegSpace $ segSpace op
mapper num_threads =
identitySegOpMapper
{ mapOnSegOpBody =
localScope scope . allocInKernelBody,
mapOnSegOpLambda =
allocInBinOpLambda num_threads (segSpace op)
}
handleMCOp :: Op MC -> AllocM MC MCMem (Op MCMem)
handleMCOp (ParOp par_op op) =
Inner <$> (ParOp <$> traverse handleSegOp par_op <*> handleSegOp op)
handleMCOp (OtherOp soac) =
error $ "Cannot allocate memory in SOAC: " ++ pretty soac
-- | The pass from 'MC' to 'MCMem'.
explicitAllocations :: Pass MC MCMem
explicitAllocations = explicitAllocationsGeneric handleMCOp defaultExpHints
|
HIPERFIT/futhark
|
src/Futhark/Pass/ExplicitAllocations/MC.hs
|
Haskell
|
isc
| 1,283 |
/*******************************************************************************
*
* Module Name: rscalc - Calculate stream and list lengths
*
******************************************************************************/
/*
* Copyright (C) 2000 - 2016, Intel Corp.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions, and the following disclaimer,
* without modification.
* 2. Redistributions in binary form must reproduce at minimum a disclaimer
* substantially similar to the "NO WARRANTY" disclaimer below
* ("Disclaimer") and any redistribution must be conditioned upon
* including a substantially similar Disclaimer requirement for further
* binary redistribution.
* 3. Neither the names of the above-listed copyright holders nor the names
* of any contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* Alternatively, this software may be distributed under the terms of the
* GNU General Public License ("GPL") version 2 as published by the Free
* Software Foundation.
*
* NO WARRANTY
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGES.
*/
#include <contrib/dev/acpica/include/acpi.h>
#include <contrib/dev/acpica/include/accommon.h>
#include <contrib/dev/acpica/include/acresrc.h>
#include <contrib/dev/acpica/include/acnamesp.h>
#define _COMPONENT ACPI_RESOURCES
ACPI_MODULE_NAME ("rscalc")
/* Local prototypes */
static UINT8
AcpiRsCountSetBits (
UINT16 BitField);
static ACPI_RS_LENGTH
AcpiRsStructOptionLength (
ACPI_RESOURCE_SOURCE *ResourceSource);
static UINT32
AcpiRsStreamOptionLength (
UINT32 ResourceLength,
UINT32 MinimumTotalLength);
/*******************************************************************************
*
* FUNCTION: AcpiRsCountSetBits
*
* PARAMETERS: BitField - Field in which to count bits
*
* RETURN: Number of bits set within the field
*
* DESCRIPTION: Count the number of bits set in a resource field. Used for
* (Short descriptor) interrupt and DMA lists.
*
******************************************************************************/
static UINT8
AcpiRsCountSetBits (
UINT16 BitField)
{
UINT8 BitsSet;
ACPI_FUNCTION_ENTRY ();
for (BitsSet = 0; BitField; BitsSet++)
{
/* Zero the least significant bit that is set */
BitField &= (UINT16) (BitField - 1);
}
return (BitsSet);
}
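/*
 * Worked example (illustrative, not part of the original sources):
 * BitField = 0x0029 (binary 0000 0000 0010 1001). Each pass clears the
 * lowest set bit: 0x0029 -> 0x0028 -> 0x0020 -> 0x0000, so the loop runs
 * three times and the function returns 3.
 */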
/*******************************************************************************
*
* FUNCTION: AcpiRsStructOptionLength
*
* PARAMETERS: ResourceSource - Pointer to optional descriptor field
*
 * RETURN:      Length of the optional ResourceSource (string length plus
 *              one byte for the ResourceSourceIndex), or 0 if absent
*
* DESCRIPTION: Common code to handle optional ResourceSourceIndex and
* ResourceSource fields in some Large descriptors. Used during
* list-to-stream conversion
*
******************************************************************************/
static ACPI_RS_LENGTH
AcpiRsStructOptionLength (
ACPI_RESOURCE_SOURCE *ResourceSource)
{
ACPI_FUNCTION_ENTRY ();
/*
* If the ResourceSource string is valid, return the size of the string
* (StringLength includes the NULL terminator) plus the size of the
* ResourceSourceIndex (1).
*/
if (ResourceSource->StringPtr)
{
return ((ACPI_RS_LENGTH) (ResourceSource->StringLength + 1));
}
return (0);
}
/*******************************************************************************
*
* FUNCTION: AcpiRsStreamOptionLength
*
* PARAMETERS: ResourceLength - Length from the resource header
* MinimumTotalLength - Minimum length of this resource, before
* any optional fields. Includes header size
*
* RETURN: Length of optional string (0 if no string present)
*
* DESCRIPTION: Common code to handle optional ResourceSourceIndex and
* ResourceSource fields in some Large descriptors. Used during
* stream-to-list conversion
*
******************************************************************************/
static UINT32
AcpiRsStreamOptionLength (
UINT32 ResourceLength,
UINT32 MinimumAmlResourceLength)
{
UINT32 StringLength = 0;
ACPI_FUNCTION_ENTRY ();
/*
* The ResourceSourceIndex and ResourceSource are optional elements of
* some Large-type resource descriptors.
*/
/*
* If the length of the actual resource descriptor is greater than the
* ACPI spec-defined minimum length, it means that a ResourceSourceIndex
* exists and is followed by a (required) null terminated string. The
* string length (including the null terminator) is the resource length
* minus the minimum length, minus one byte for the ResourceSourceIndex
* itself.
*/
if (ResourceLength > MinimumAmlResourceLength)
{
/* Compute the length of the optional string */
StringLength = ResourceLength - MinimumAmlResourceLength - 1;
}
/*
* Round the length up to a multiple of the native word in order to
* guarantee that the entire resource descriptor is native word aligned
*/
return ((UINT32) ACPI_ROUND_UP_TO_NATIVE_WORD (StringLength));
}
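/*
 * Worked example (illustrative): for a resource whose ResourceLength is 0x2B
 * while the spec-defined minimum is 0x23, the optional string occupies
 * 0x2B - 0x23 - 1 = 7 bytes (the extra 1 is the ResourceSourceIndex byte).
 * ACPI_ROUND_UP_TO_NATIVE_WORD then rounds 7 up to 8 on a 64-bit build.
 */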
/*******************************************************************************
*
* FUNCTION: AcpiRsGetAmlLength
*
* PARAMETERS: Resource - Pointer to the resource linked list
* ResourceListSize - Size of the resource linked list
* SizeNeeded - Where the required size is returned
*
* RETURN: Status
*
* DESCRIPTION: Takes a linked list of internal resource descriptors and
* calculates the size buffer needed to hold the corresponding
* external resource byte stream.
*
******************************************************************************/
ACPI_STATUS
AcpiRsGetAmlLength (
ACPI_RESOURCE *Resource,
ACPI_SIZE ResourceListSize,
ACPI_SIZE *SizeNeeded)
{
ACPI_SIZE AmlSizeNeeded = 0;
ACPI_RESOURCE *ResourceEnd;
ACPI_RS_LENGTH TotalSize;
ACPI_FUNCTION_TRACE (RsGetAmlLength);
/* Traverse entire list of internal resource descriptors */
ResourceEnd = ACPI_ADD_PTR (ACPI_RESOURCE, Resource, ResourceListSize);
while (Resource < ResourceEnd)
{
/* Validate the descriptor type */
if (Resource->Type > ACPI_RESOURCE_TYPE_MAX)
{
return_ACPI_STATUS (AE_AML_INVALID_RESOURCE_TYPE);
}
/* Sanity check the length. It must not be zero, or we loop forever */
if (!Resource->Length)
{
return_ACPI_STATUS (AE_AML_BAD_RESOURCE_LENGTH);
}
/* Get the base size of the (external stream) resource descriptor */
TotalSize = AcpiGbl_AmlResourceSizes [Resource->Type];
/*
* Augment the base size for descriptors with optional and/or
* variable-length fields
*/
switch (Resource->Type)
{
case ACPI_RESOURCE_TYPE_IRQ:
/* Length can be 3 or 2 */
if (Resource->Data.Irq.DescriptorLength == 2)
{
TotalSize--;
}
break;
case ACPI_RESOURCE_TYPE_START_DEPENDENT:
/* Length can be 1 or 0 */
if (Resource->Data.Irq.DescriptorLength == 0)
{
TotalSize--;
}
break;
case ACPI_RESOURCE_TYPE_VENDOR:
/*
* Vendor Defined Resource:
* For a Vendor Specific resource, if the Length is between 1 and 7
* it will be created as a Small Resource data type, otherwise it
* is a Large Resource data type.
*/
if (Resource->Data.Vendor.ByteLength > 7)
{
/* Base size of a Large resource descriptor */
TotalSize = sizeof (AML_RESOURCE_LARGE_HEADER);
}
/* Add the size of the vendor-specific data */
TotalSize = (ACPI_RS_LENGTH)
(TotalSize + Resource->Data.Vendor.ByteLength);
break;
case ACPI_RESOURCE_TYPE_END_TAG:
/*
* End Tag:
* We are done -- return the accumulated total size.
*/
*SizeNeeded = AmlSizeNeeded + TotalSize;
/* Normal exit */
return_ACPI_STATUS (AE_OK);
case ACPI_RESOURCE_TYPE_ADDRESS16:
/*
* 16-Bit Address Resource:
* Add the size of the optional ResourceSource info
*/
TotalSize = (ACPI_RS_LENGTH) (TotalSize +
AcpiRsStructOptionLength (
&Resource->Data.Address16.ResourceSource));
break;
case ACPI_RESOURCE_TYPE_ADDRESS32:
/*
* 32-Bit Address Resource:
* Add the size of the optional ResourceSource info
*/
TotalSize = (ACPI_RS_LENGTH) (TotalSize +
AcpiRsStructOptionLength (
&Resource->Data.Address32.ResourceSource));
break;
case ACPI_RESOURCE_TYPE_ADDRESS64:
/*
* 64-Bit Address Resource:
* Add the size of the optional ResourceSource info
*/
TotalSize = (ACPI_RS_LENGTH) (TotalSize +
AcpiRsStructOptionLength (
&Resource->Data.Address64.ResourceSource));
break;
case ACPI_RESOURCE_TYPE_EXTENDED_IRQ:
/*
* Extended IRQ Resource:
* Add the size of each additional optional interrupt beyond the
* required 1 (4 bytes for each UINT32 interrupt number)
*/
TotalSize = (ACPI_RS_LENGTH) (TotalSize +
((Resource->Data.ExtendedIrq.InterruptCount - 1) * 4) +
/* Add the size of the optional ResourceSource info */
AcpiRsStructOptionLength (
&Resource->Data.ExtendedIrq.ResourceSource));
break;
case ACPI_RESOURCE_TYPE_GPIO:
TotalSize = (ACPI_RS_LENGTH) (TotalSize +
(Resource->Data.Gpio.PinTableLength * 2) +
Resource->Data.Gpio.ResourceSource.StringLength +
Resource->Data.Gpio.VendorLength);
break;
case ACPI_RESOURCE_TYPE_SERIAL_BUS:
TotalSize = AcpiGbl_AmlResourceSerialBusSizes [
Resource->Data.CommonSerialBus.Type];
TotalSize = (ACPI_RS_LENGTH) (TotalSize +
Resource->Data.I2cSerialBus.ResourceSource.StringLength +
Resource->Data.I2cSerialBus.VendorLength);
break;
default:
break;
}
/* Update the total */
AmlSizeNeeded += TotalSize;
/* Point to the next object */
Resource = ACPI_ADD_PTR (ACPI_RESOURCE, Resource, Resource->Length);
}
/* Did not find an EndTag resource descriptor */
return_ACPI_STATUS (AE_AML_NO_RESOURCE_END_TAG);
}
/*******************************************************************************
*
* FUNCTION: AcpiRsGetListLength
*
* PARAMETERS: AmlBuffer - Pointer to the resource byte stream
* AmlBufferLength - Size of AmlBuffer
* SizeNeeded - Where the size needed is returned
*
* RETURN: Status
*
* DESCRIPTION: Takes an external resource byte stream and calculates the size
* buffer needed to hold the corresponding internal resource
* descriptor linked list.
*
******************************************************************************/
ACPI_STATUS
AcpiRsGetListLength (
UINT8 *AmlBuffer,
UINT32 AmlBufferLength,
ACPI_SIZE *SizeNeeded)
{
ACPI_STATUS Status;
UINT8 *EndAml;
UINT8 *Buffer;
UINT32 BufferSize;
UINT16 Temp16;
UINT16 ResourceLength;
UINT32 ExtraStructBytes;
UINT8 ResourceIndex;
UINT8 MinimumAmlResourceLength;
AML_RESOURCE *AmlResource;
ACPI_FUNCTION_TRACE (RsGetListLength);
*SizeNeeded = ACPI_RS_SIZE_MIN; /* Minimum size is one EndTag */
EndAml = AmlBuffer + AmlBufferLength;
/* Walk the list of AML resource descriptors */
while (AmlBuffer < EndAml)
{
/* Validate the Resource Type and Resource Length */
Status = AcpiUtValidateResource (NULL, AmlBuffer, &ResourceIndex);
if (ACPI_FAILURE (Status))
{
/*
* Exit on failure. Cannot continue because the descriptor length
* may be bogus also.
*/
return_ACPI_STATUS (Status);
}
AmlResource = (void *) AmlBuffer;
/* Get the resource length and base (minimum) AML size */
ResourceLength = AcpiUtGetResourceLength (AmlBuffer);
MinimumAmlResourceLength = AcpiGbl_ResourceAmlSizes[ResourceIndex];
/*
* Augment the size for descriptors with optional
* and/or variable length fields
*/
ExtraStructBytes = 0;
Buffer = AmlBuffer + AcpiUtGetResourceHeaderLength (AmlBuffer);
switch (AcpiUtGetResourceType (AmlBuffer))
{
case ACPI_RESOURCE_NAME_IRQ:
/*
* IRQ Resource:
* Get the number of bits set in the 16-bit IRQ mask
*/
ACPI_MOVE_16_TO_16 (&Temp16, Buffer);
ExtraStructBytes = AcpiRsCountSetBits (Temp16);
break;
case ACPI_RESOURCE_NAME_DMA:
/*
* DMA Resource:
* Get the number of bits set in the 8-bit DMA mask
*/
ExtraStructBytes = AcpiRsCountSetBits (*Buffer);
break;
case ACPI_RESOURCE_NAME_VENDOR_SMALL:
case ACPI_RESOURCE_NAME_VENDOR_LARGE:
/*
* Vendor Resource:
* Get the number of vendor data bytes
*/
ExtraStructBytes = ResourceLength;
/*
* There is already one byte included in the minimum
* descriptor size. If there are extra struct bytes,
* subtract one from the count.
*/
if (ExtraStructBytes)
{
ExtraStructBytes--;
}
break;
case ACPI_RESOURCE_NAME_END_TAG:
/*
* End Tag: This is the normal exit
*/
return_ACPI_STATUS (AE_OK);
case ACPI_RESOURCE_NAME_ADDRESS32:
case ACPI_RESOURCE_NAME_ADDRESS16:
case ACPI_RESOURCE_NAME_ADDRESS64:
/*
* Address Resource:
* Add the size of the optional ResourceSource
*/
ExtraStructBytes = AcpiRsStreamOptionLength (
ResourceLength, MinimumAmlResourceLength);
break;
case ACPI_RESOURCE_NAME_EXTENDED_IRQ:
/*
* Extended IRQ Resource:
* Using the InterruptTableLength, add 4 bytes for each additional
* interrupt. Note: at least one interrupt is required and is
* included in the minimum descriptor size (reason for the -1)
*/
ExtraStructBytes = (Buffer[1] - 1) * sizeof (UINT32);
/* Add the size of the optional ResourceSource */
ExtraStructBytes += AcpiRsStreamOptionLength (
ResourceLength - ExtraStructBytes, MinimumAmlResourceLength);
break;
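        /*
         * Illustrative example: an Extended IRQ descriptor that lists three
         * interrupts has Buffer[1] == 3, so ExtraStructBytes = (3 - 1) * 4 = 8;
         * the first interrupt is already counted in the minimum descriptor size.
         */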
case ACPI_RESOURCE_NAME_GPIO:
/* Vendor data is optional */
if (AmlResource->Gpio.VendorLength)
{
ExtraStructBytes +=
AmlResource->Gpio.VendorOffset -
AmlResource->Gpio.PinTableOffset +
AmlResource->Gpio.VendorLength;
}
else
{
ExtraStructBytes +=
AmlResource->LargeHeader.ResourceLength +
sizeof (AML_RESOURCE_LARGE_HEADER) -
AmlResource->Gpio.PinTableOffset;
}
break;
case ACPI_RESOURCE_NAME_SERIAL_BUS:
MinimumAmlResourceLength = AcpiGbl_ResourceAmlSerialBusSizes[
AmlResource->CommonSerialBus.Type];
ExtraStructBytes +=
AmlResource->CommonSerialBus.ResourceLength -
MinimumAmlResourceLength;
break;
default:
break;
}
/*
* Update the required buffer size for the internal descriptor structs
*
* Important: Round the size up for the appropriate alignment. This
* is a requirement on IA64.
*/
if (AcpiUtGetResourceType (AmlBuffer) ==
ACPI_RESOURCE_NAME_SERIAL_BUS)
{
BufferSize = AcpiGbl_ResourceStructSerialBusSizes[
AmlResource->CommonSerialBus.Type] + ExtraStructBytes;
}
else
{
BufferSize = AcpiGbl_ResourceStructSizes[ResourceIndex] +
ExtraStructBytes;
}
BufferSize = (UINT32) ACPI_ROUND_UP_TO_NATIVE_WORD (BufferSize);
*SizeNeeded += BufferSize;
ACPI_DEBUG_PRINT ((ACPI_DB_RESOURCES,
"Type %.2X, AmlLength %.2X InternalLength %.2X\n",
AcpiUtGetResourceType (AmlBuffer),
AcpiUtGetDescriptorLength (AmlBuffer), BufferSize));
/*
* Point to the next resource within the AML stream using the length
* contained in the resource descriptor header
*/
AmlBuffer += AcpiUtGetDescriptorLength (AmlBuffer);
}
/* Did not find an EndTag resource descriptor */
return_ACPI_STATUS (AE_AML_NO_RESOURCE_END_TAG);
}
/*******************************************************************************
*
* FUNCTION: AcpiRsGetPciRoutingTableLength
*
* PARAMETERS: PackageObject - Pointer to the package object
* BufferSizeNeeded - UINT32 pointer of the size buffer
* needed to properly return the
* parsed data
*
* RETURN: Status
*
* DESCRIPTION: Given a package representing a PCI routing table, this
* calculates the size of the corresponding linked list of
* descriptions.
*
******************************************************************************/
ACPI_STATUS
AcpiRsGetPciRoutingTableLength (
ACPI_OPERAND_OBJECT *PackageObject,
ACPI_SIZE *BufferSizeNeeded)
{
UINT32 NumberOfElements;
ACPI_SIZE TempSizeNeeded = 0;
ACPI_OPERAND_OBJECT **TopObjectList;
UINT32 Index;
ACPI_OPERAND_OBJECT *PackageElement;
ACPI_OPERAND_OBJECT **SubObjectList;
BOOLEAN NameFound;
UINT32 TableIndex;
ACPI_FUNCTION_TRACE (RsGetPciRoutingTableLength);
NumberOfElements = PackageObject->Package.Count;
/*
* Calculate the size of the return buffer.
* The base size is the number of elements * the sizes of the
* structures. Additional space for the strings is added below.
* The minus one is to subtract the size of the UINT8 Source[1]
* member because it is added below.
*
* But each PRT_ENTRY structure has a pointer to a string and
* the size of that string must be found.
*/
TopObjectList = PackageObject->Package.Elements;
for (Index = 0; Index < NumberOfElements; Index++)
{
/* Dereference the subpackage */
PackageElement = *TopObjectList;
/* We must have a valid Package object */
if (!PackageElement ||
(PackageElement->Common.Type != ACPI_TYPE_PACKAGE))
{
return_ACPI_STATUS (AE_AML_OPERAND_TYPE);
}
/*
* The SubObjectList will now point to an array of the
* four IRQ elements: Address, Pin, Source and SourceIndex
*/
SubObjectList = PackageElement->Package.Elements;
/* Scan the IrqTableElements for the Source Name String */
NameFound = FALSE;
for (TableIndex = 0;
TableIndex < PackageElement->Package.Count && !NameFound;
TableIndex++)
{
if (*SubObjectList && /* Null object allowed */
((ACPI_TYPE_STRING ==
(*SubObjectList)->Common.Type) ||
((ACPI_TYPE_LOCAL_REFERENCE ==
(*SubObjectList)->Common.Type) &&
((*SubObjectList)->Reference.Class ==
ACPI_REFCLASS_NAME))))
{
NameFound = TRUE;
}
else
{
/* Look at the next element */
SubObjectList++;
}
}
TempSizeNeeded += (sizeof (ACPI_PCI_ROUTING_TABLE) - 4);
/* Was a String type found? */
if (NameFound)
{
if ((*SubObjectList)->Common.Type == ACPI_TYPE_STRING)
{
/*
* The length String.Length field does not include the
* terminating NULL, add 1
*/
TempSizeNeeded += ((ACPI_SIZE)
(*SubObjectList)->String.Length + 1);
}
else
{
TempSizeNeeded += AcpiNsGetPathnameLength (
(*SubObjectList)->Reference.Node);
}
}
else
{
/*
* If no name was found, then this is a NULL, which is
* translated as a UINT32 zero.
*/
TempSizeNeeded += sizeof (UINT32);
}
/* Round up the size since each element must be aligned */
TempSizeNeeded = ACPI_ROUND_UP_TO_64BIT (TempSizeNeeded);
/* Point to the next ACPI_OPERAND_OBJECT */
TopObjectList++;
}
/*
* Add an extra element to the end of the list, essentially a
* NULL terminator
*/
*BufferSizeNeeded = TempSizeNeeded + sizeof (ACPI_PCI_ROUTING_TABLE);
return_ACPI_STATUS (AE_OK);
}
|
TigerBSD/TigerBSD
|
FreeBSD/sys/contrib/dev/acpica/components/resources/rscalc.c
|
C
|
isc
| 23,744 |
/* eslint-disable flowtype/require-parameter-type, flowtype/require-return-type, no-magic-numbers */
import {test} from "tap"
import {spy} from "sinon"
import aside from "./"
test(({equal, end}) => {
const unction = spy(() => "b")
equal(aside([unction])("a"), "a")
end()
})
test(({ok, end}) => {
const unction = spy(() => "b")
aside([unction])("a")
ok(unction.calledWith("a"))
end()
})
test(({equal, end}) => {
const unction = spy(() => "b")
equal(aside([unction])("a"), "a")
end()
})
test(({ok, equal, end}) => {
const unctionA = spy(() => "b")
const unctionB = spy(() => "c")
equal(aside([unctionA, unctionB])("a"), "a")
ok(unctionA.calledWith("a"))
ok(unctionB.calledWith("b"))
end()
})
|
krainboltgreene/ramda-extra.js
|
package/aside/test.js
|
JavaScript
|
isc
| 737 |
/*-
* Copyright (c) 1999 M. Warner Losh <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Modifications for Megahertz X-Jack Ethernet Card (XJ-10BT)
*
* Copyright (c) 1996 by Tatsumi Hosokawa <[email protected]>
* BSD-nomads, Tokyo, Japan.
*/
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");
#include <sys/param.h>
#include <sys/bus.h>
#include <sys/kernel.h>
#include <sys/module.h>
#include <sys/socket.h>
#include <sys/systm.h>
#include <net/ethernet.h>
#include <net/if.h>
#include <net/if_arp.h>
#include <machine/bus.h>
#include <machine/resource.h>
#include <sys/rman.h>
#include <dev/pccard/pccardvar.h>
#include <dev/pccard/pccard_cis.h>
#include <dev/sn/if_snreg.h>
#include <dev/sn/if_snvar.h>
#include "card_if.h"
#include "pccarddevs.h"
typedef int sn_get_enaddr_t(device_t dev, u_char *eaddr);
typedef int sn_activate_t(device_t dev);
struct sn_sw
{
int type;
#define SN_NORMAL 1
#define SN_MEGAHERTZ 2
#define SN_OSITECH 3
#define SN_OSI_SOD 4
#define SN_MOTO_MARINER 5
char *typestr;
sn_get_enaddr_t *get_mac;
sn_activate_t *activate;
};
static sn_get_enaddr_t sn_pccard_normal_get_mac;
static sn_activate_t sn_pccard_normal_activate;
const static struct sn_sw sn_normal_sw = {
SN_NORMAL, "plain",
sn_pccard_normal_get_mac,
sn_pccard_normal_activate
};
static sn_get_enaddr_t sn_pccard_megahertz_get_mac;
static sn_activate_t sn_pccard_megahertz_activate;
const static struct sn_sw sn_mhz_sw = {
SN_MEGAHERTZ, "Megahertz",
sn_pccard_megahertz_get_mac,
sn_pccard_megahertz_activate
};
static const struct sn_product {
struct pccard_product prod;
const struct sn_sw *sw;
} sn_pccard_products[] = {
{ PCMCIA_CARD(DSPSI, XJEM1144), &sn_mhz_sw },
{ PCMCIA_CARD(DSPSI, XJACK), &sn_normal_sw },
/* { PCMCIA_CARD(MOTOROLA, MARINER), SN_MOTO_MARINER }, */
{ PCMCIA_CARD(NEWMEDIA, BASICS), &sn_normal_sw },
{ PCMCIA_CARD(MEGAHERTZ, VARIOUS), &sn_mhz_sw},
{ PCMCIA_CARD(MEGAHERTZ, XJEM3336), &sn_mhz_sw},
/* { PCMCIA_CARD(OSITECH, TRUMP_SOD), SN_OSI_SOD }, */
/* { PCMCIA_CARD(OSITECH, TRUMP_JOH), SN_OSITECH }, */
/* { PCMCIA_CARD(PSION, GOLDCARD), SN_OSITECH }, */
/* { PCMCIA_CARD(PSION, NETGLOBAL), SNI_OSI_SOD }, */
/* { PCMCIA_CARD(PSION, NETGLOBAL2), SN_OSITECH }, */
{ PCMCIA_CARD(SMC, 8020BT), &sn_normal_sw },
{ PCMCIA_CARD(SMC, SMC91C96), &sn_normal_sw },
{ { NULL } }
};
static const struct sn_product *
sn_pccard_lookup(device_t dev)
{
return ((const struct sn_product *)
pccard_product_lookup(dev,
(const struct pccard_product *)sn_pccard_products,
sizeof(sn_pccard_products[0]), NULL));
}
static int
sn_pccard_probe(device_t dev)
{
const struct sn_product *pp;
if ((pp = sn_pccard_lookup(dev)) != NULL) {
if (pp->prod.pp_name != NULL)
device_set_desc(dev, pp->prod.pp_name);
return 0;
}
return EIO;
}
static int
sn_pccard_ascii_enaddr(const char *str, u_char *enet)
{
uint8_t digit;
int i;
memset(enet, 0, ETHER_ADDR_LEN);
for (i = 0, digit = 0; i < (ETHER_ADDR_LEN * 2); i++) {
if (str[i] >= '0' && str[i] <= '9')
digit |= str[i] - '0';
else if (str[i] >= 'a' && str[i] <= 'f')
digit |= (str[i] - 'a') + 10;
else if (str[i] >= 'A' && str[i] <= 'F')
digit |= (str[i] - 'A') + 10;
else
return (0); /* Bogus digit!! */
/* Compensate for ordering of digits. */
if (i & 1) {
enet[i >> 1] = digit;
digit = 0;
} else
digit <<= 4;
}
return (1);
}
static int
sn_pccard_normal_get_mac(device_t dev, u_char *eaddr)
{
int i, sum;
const char *cisstr;
pccard_get_ether(dev, eaddr);
for (i = 0, sum = 0; i < ETHER_ADDR_LEN; i++)
sum |= eaddr[i];
if (sum == 0) {
pccard_get_cis3_str(dev, &cisstr);
if (cisstr && strlen(cisstr) == ETHER_ADDR_LEN * 2)
sum = sn_pccard_ascii_enaddr(cisstr, eaddr);
}
if (sum == 0) {
pccard_get_cis4_str(dev, &cisstr);
if (cisstr && strlen(cisstr) == ETHER_ADDR_LEN * 2)
sum = sn_pccard_ascii_enaddr(cisstr, eaddr);
}
return sum;
}
static int
sn_pccard_normal_activate(device_t dev)
{
int err;
err = sn_activate(dev);
if (err)
sn_deactivate(dev);
return (err);
}
static int
sn_pccard_megahertz_mac(const struct pccard_tuple *tuple, void *argp)
{
uint8_t *enaddr = argp;
int i;
uint8_t buffer[ETHER_ADDR_LEN * 2];
    /* Code 0x81 is Megahertz' special cis node containing the MAC */
if (tuple->code != 0x81)
return (0);
/* Make sure this is a sane node, as ASCII digits */
if (tuple->length != ETHER_ADDR_LEN * 2 + 1)
return (0);
/* Copy the MAC ADDR and return success if decoded */
for (i = 0; i < ETHER_ADDR_LEN * 2; i++)
buffer[i] = pccard_tuple_read_1(tuple, i);
return (sn_pccard_ascii_enaddr(buffer, enaddr));
}
static int
sn_pccard_megahertz_get_mac(device_t dev, u_char *eaddr)
{
if (sn_pccard_normal_get_mac(dev, eaddr))
return 1;
/*
* If that fails, try the special CIS tuple 0x81 that the
* '3288 and '3336 cards have. That tuple specifies an ASCII
* string, ala CIS3 or CIS4 in the 'normal' cards.
*/
return (pccard_cis_scan(dev, sn_pccard_megahertz_mac, eaddr));
}
static int
sn_pccard_megahertz_activate(device_t dev)
{
int err;
struct sn_softc *sc = device_get_softc(dev);
u_long start;
err = sn_activate(dev);
if (err) {
sn_deactivate(dev);
return (err);
}
/*
* CIS resource is the modem one, so save it away.
*/
sc->modem_rid = sc->port_rid;
sc->modem_res = sc->port_res;
/*
* The MHz XJEM/CCEM series of cards just need to have any
* old resource allocated for the ethernet side of things,
* provided bit 0x80 isn't set in the address. That bit is
     * evidently reserved for modem function and is how the
* card steers the addresses internally.
*/
sc->port_res = NULL;
start = 0;
do
{
sc->port_rid = 1;
sc->port_res = bus_alloc_resource(dev, SYS_RES_IOPORT,
&sc->port_rid, start, ~0, SMC_IO_EXTENT, RF_ACTIVE);
if (sc->port_res == NULL)
break;
if (!(rman_get_start(sc->port_res) & 0x80))
break;
start = rman_get_start(sc->port_res) + SMC_IO_EXTENT;
bus_release_resource(dev, SYS_RES_IOPORT, sc->port_rid,
sc->port_res);
} while (start < 0xff80);
if (sc->port_res == NULL) {
sn_deactivate(dev);
return ENOMEM;
}
return 0;
}
static int
sn_pccard_attach(device_t dev)
{
struct sn_softc *sc = device_get_softc(dev);
u_char eaddr[ETHER_ADDR_LEN];
int i, err;
uint16_t w;
u_char sum;
const struct sn_product *pp;
pp = sn_pccard_lookup(dev);
sum = pp->sw->get_mac(dev, eaddr);
/* Allocate resources so we can program the ether addr */
sc->dev = dev;
err = pp->sw->activate(dev);
if (err != 0)
return (err);
if (sum) {
printf("Programming sn card's addr\n");
SMC_SELECT_BANK(sc, 1);
for (i = 0; i < 3; i++) {
w = (uint16_t)eaddr[i * 2] |
(((uint16_t)eaddr[i * 2 + 1]) << 8);
CSR_WRITE_2(sc, IAR_ADDR0_REG_W + i * 2, w);
}
}
err = sn_attach(dev);
if (err)
sn_deactivate(dev);
return (err);
}
static device_method_t sn_pccard_methods[] = {
/* Device interface */
DEVMETHOD(device_probe, sn_pccard_probe),
DEVMETHOD(device_attach, sn_pccard_attach),
DEVMETHOD(device_detach, sn_detach),
{ 0, 0 }
};
static driver_t sn_pccard_driver = {
"sn",
sn_pccard_methods,
sizeof(struct sn_softc),
};
extern devclass_t sn_devclass;
DRIVER_MODULE(sn, pccard, sn_pccard_driver, sn_devclass, 0, 0);
MODULE_DEPEND(sn, ether, 1, 1, 1);
PCCARD_PNP_INFO(sn_pccard_products);
|
TigerBSD/TigerBSD
|
FreeBSD/sys/dev/sn/if_sn_pccard.c
|
C
|
isc
| 8,607 |
/*
Copyright (C) 2004 Michael J. Silbersack. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
*/
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/wait.h>
#include <assert.h>
#include <err.h>
#include <errno.h>
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
/*
* $FreeBSD$
* The goal of this program is to see if fstat reports the correct
* data count for a pipe. Prior to revision 1.172 of sys_pipe.c,
* 0 would be returned once the pipe entered direct write mode.
*
* Linux (2.6) always returns zero, so it's not a valuable platform
* for comparison.
*/
int
main(void)
{
char buffer[32768], buffer2[32768], go[] = "go", go2[] = "go2";
int desc[2], ipc_coord[2];
ssize_t error;
int successes = 0;
struct stat status;
pid_t new_pid;
error = pipe(desc);
if (error == -1)
err(1, "Couldn't allocate data pipe");
error = pipe(ipc_coord);
if (error == -1)
err(1, "Couldn't allocate IPC coordination pipe");
new_pid = fork();
assert(new_pid != -1);
close(new_pid == 0 ? desc[0] : desc[1]);
#define SYNC_R(i, _buf) do { \
int _error = errno; \
warnx("%d: waiting for synchronization", __LINE__); \
if (read(ipc_coord[i], &_buf, sizeof(_buf)) != sizeof(_buf)) \
err(1, "failed to synchronize (%s)", (i == 0 ? "parent" : "child")); \
errno = _error; \
} while(0)
#define SYNC_W(i, _buf) do { \
int _error = errno; \
warnx("%d: sending synchronization", __LINE__); \
if (write(ipc_coord[i], &_buf, sizeof(_buf)) != sizeof(_buf)) \
err(1, "failed to synchronize (%s)", (i == 0 ? "child" : "parent")); \
errno = _error; \
} while(0)
#define WRITE(s) do { \
ssize_t _size; \
if ((_size = write(desc[1], &buffer, s)) != s) \
warn("short write; wrote %zd, expected %d", _size, s); \
} while(0)
if (new_pid == 0) {
SYNC_R(0, go);
WRITE(145);
SYNC_W(0, go2);
SYNC_R(0, go);
WRITE(2048);
SYNC_W(0, go2);
SYNC_R(0, go);
WRITE(4096);
SYNC_W(0, go2);
SYNC_R(0, go);
WRITE(8191);
SYNC_W(0, go2);
SYNC_R(0, go);
SYNC_W(0, go2); /* XXX: why is this required? */
WRITE(8192);
SYNC_W(0, go2);
close(ipc_coord[0]);
close(ipc_coord[1]);
_exit(0);
}
while (successes < 5) {
SYNC_W(1, go);
SYNC_R(1, go2);
fstat(desc[0], &status);
error = read(desc[0], &buffer2, sizeof(buffer2));
if (status.st_size != error)
err(1, "FAILURE: stat size %jd read size %zd",
(intmax_t)status.st_size, error);
if (error > 0) {
printf("SUCCESS at stat size %jd read size %zd\n",
(intmax_t)status.st_size, error);
successes++;
}
}
exit(0);
}
|
TigerBSD/TigerBSD
|
FreeBSD/tests/sys/kern/pipe/pipe_fstat_bug_test.c
|
C
|
isc
| 3,787 |
/*
* The Yices SMT Solver. Copyright 2014 SRI International.
*
* This program may only be used subject to the noncommercial end user
* license agreement which is downloadable along with this program.
*/
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <inttypes.h>
#include "utils/int_stack.h"
static int_stack_t stack;
static void print_stack(int_stack_t *stack) {
iblock_t *b;
printf("stack %p\n", stack);
printf(" current block = %p\n", stack->current);
printf(" free list = %p\n", stack->free);
printf(" active blocks:\n");
b = stack->current;
while (b != NULL) {
printf(" block %p: size = %"PRIu32" ptr = %"PRIu32" data = %p\n", b, b->size, b->ptr, b->data);
b = b->next;
}
printf(" free blocks:\n");
b = stack->free;
while (b != NULL) {
printf(" block %p: size = %"PRIu32" ptr = %"PRIu32" data = %p\n", b, b->size, b->ptr, b->data);
b = b->next;
}
printf("\n");
}
int main(void) {
int32_t *a1, *a2, *a3, *a4;
printf("=== Initialization ===\n");
init_istack(&stack);
print_stack(&stack);
printf("=== Allocation a1: size 100 ===\n");
a1 = alloc_istack_array(&stack, 100);
printf(" a1 = %p\n", a1);
print_stack(&stack);
printf("=== Allocation a2: size 500 ===\n");
a2 = alloc_istack_array(&stack, 500);
printf(" a2 = %p\n", a2);
print_stack(&stack);
printf("=== Allocation a3: size 800 ===\n");
a3 = alloc_istack_array(&stack, 800);
printf(" a3 = %p\n", a3);
print_stack(&stack);
printf("=== Allocation a4: size 8000 ===\n");
a4 = alloc_istack_array(&stack, 8000);
printf(" a4 = %p\n", a4);
print_stack(&stack);
printf("=== Free a4 ===\n");
free_istack_array(&stack, a4);
print_stack(&stack);
printf("=== Allocation a4: size 800 ===\n");
a4 = alloc_istack_array(&stack, 800);
printf(" a4 = %p\n", a4);
print_stack(&stack);
printf("=== Free a4 ===\n");
free_istack_array(&stack, a4);
print_stack(&stack);
printf("=== Free a3 ===\n");
free_istack_array(&stack, a3);
print_stack(&stack);
printf("=== Reset ===\n");
reset_istack(&stack);
print_stack(&stack);
delete_istack(&stack);
return 0;
}
|
maelvalais/ocamlyices2
|
ext/yices/tests/unit/test_int_stack.c
|
C
|
isc
| 2,167 |
/*
* Copyright (c) 2006, 2007 ThoughtWorks, Inc.
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package com.thoughtworks.cozmos;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.Socket;
import java.net.URL;
import java.util.StringTokenizer;
public class ModDavSvnProxyServlet2 extends HttpServlet {
private String targetURL;
private String newPageTemplate;
public void init(ServletConfig servletConfig) throws ServletException {
targetURL = servletConfig.getInitParameter("mod_dav_svn_url");
newPageTemplate = servletConfig.getInitParameter("new_page_template_file");
super.init(servletConfig);
}
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String path = req.getServletPath();
Socket socket = startGet(new URL(targetURL + path));
InputStream is = socket.getInputStream();
LineNumberReader lnr = new LineNumberReader(new InputStreamReader(is));
boolean ok = isOk(lnr);
if (!ok) {
socket = startGet(new URL(targetURL + newPageTemplate));
            // Read from the newly opened socket, not the already-consumed stream.
            lnr = new LineNumberReader(new InputStreamReader(socket.getInputStream()));
ok = isOk(lnr);
}
if (ok) {
lnr.readLine(); // Date:
lnr.readLine(); // Server:
lnr.readLine(); // ETag:
lnr.readLine(); // Accept-Ranges:
int contentLength = getContentLen(lnr.readLine());
lnr.readLine(); // Content-Type:
lnr.readLine(); // end of header
resp.setContentType(getServletContext().getMimeType(path));
OutputStream os = resp.getOutputStream();
int done = 0;
while (done < contentLength) {
int i = lnr.read();
done++;
os.write(i);
}
socket.close();
}
}
private int getContentLen(String s) {
StringTokenizer st = new StringTokenizer(s);
st.nextToken();
return Integer.parseInt(st.nextToken());
}
private boolean isOk(LineNumberReader lnr) throws IOException {
return "HTTP/1.1 200 OK".equals(lnr.readLine());
}
private Socket startGet(URL url) throws IOException {
Socket socket = new Socket(url.getHost(), 80);
PrintWriter pw = new PrintWriter(socket.getOutputStream(), true);
pw.println("GET " + url.getPath() + " HTTP/1.1");
pw.println("Host: " + url.getHost());
pw.println();
return socket;
}
}
|
codehaus/cozmos
|
src/main/java/com/thoughtworks/cozmos/ModDavSvnProxyServlet2.java
|
Java
|
isc
| 3,638 |
/*-
* Copyright (c) 2010, 2012 Konstantin Belousov <[email protected]>
* Copyright (c) 2015 The FreeBSD Foundation
* All rights reserved.
*
* Portions of this software were developed by Konstantin Belousov
* under sponsorship from the FreeBSD Foundation.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");
#include "opt_compat.h"
#include "opt_vm.h"
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/kernel.h>
#include <sys/lock.h>
#include <sys/malloc.h>
#include <sys/rwlock.h>
#include <sys/sysent.h>
#include <sys/sysctl.h>
#include <sys/vdso.h>
#include <vm/vm.h>
#include <vm/vm_param.h>
#include <vm/pmap.h>
#include <vm/vm_extern.h>
#include <vm/vm_kern.h>
#include <vm/vm_map.h>
#include <vm/vm_object.h>
#include <vm/vm_page.h>
#include <vm/vm_pager.h>
static struct sx shared_page_alloc_sx;
static vm_object_t shared_page_obj;
static int shared_page_free;
char *shared_page_mapping;
void
shared_page_write(int base, int size, const void *data)
{
bcopy(data, shared_page_mapping + base, size);
}
static int
shared_page_alloc_locked(int size, int align)
{
int res;
res = roundup(shared_page_free, align);
if (res + size >= IDX_TO_OFF(shared_page_obj->size))
res = -1;
else
shared_page_free = res + size;
return (res);
}
int
shared_page_alloc(int size, int align)
{
int res;
sx_xlock(&shared_page_alloc_sx);
res = shared_page_alloc_locked(size, align);
sx_xunlock(&shared_page_alloc_sx);
return (res);
}
int
shared_page_fill(int size, int align, const void *data)
{
int res;
sx_xlock(&shared_page_alloc_sx);
res = shared_page_alloc_locked(size, align);
if (res != -1)
shared_page_write(res, size, data);
sx_xunlock(&shared_page_alloc_sx);
return (res);
}
static void
shared_page_init(void *dummy __unused)
{
vm_page_t m;
vm_offset_t addr;
sx_init(&shared_page_alloc_sx, "shpsx");
shared_page_obj = vm_pager_allocate(OBJT_PHYS, 0, PAGE_SIZE,
VM_PROT_DEFAULT, 0, NULL);
VM_OBJECT_WLOCK(shared_page_obj);
m = vm_page_grab(shared_page_obj, 0, VM_ALLOC_NOBUSY | VM_ALLOC_ZERO);
m->valid = VM_PAGE_BITS_ALL;
VM_OBJECT_WUNLOCK(shared_page_obj);
addr = kva_alloc(PAGE_SIZE);
pmap_qenter(addr, &m, 1);
shared_page_mapping = (char *)addr;
}
SYSINIT(shp, SI_SUB_EXEC, SI_ORDER_FIRST, (sysinit_cfunc_t)shared_page_init,
NULL);
/*
* Push the timehands update to the shared page.
*
* The lockless update scheme is similar to the one used to update the
* in-kernel timehands, see sys/kern/kern_tc.c:tc_windup() (which
* calls us after the timehands are updated).
*/
static void
timehands_update(struct vdso_sv_tk *svtk)
{
struct vdso_timehands th;
struct vdso_timekeep *tk;
uint32_t enabled, idx;
enabled = tc_fill_vdso_timehands(&th);
th.th_gen = 0;
idx = svtk->sv_timekeep_curr;
if (++idx >= VDSO_TH_NUM)
idx = 0;
svtk->sv_timekeep_curr = idx;
if (++svtk->sv_timekeep_gen == 0)
svtk->sv_timekeep_gen = 1;
tk = (struct vdso_timekeep *)(shared_page_mapping +
svtk->sv_timekeep_off);
tk->tk_th[idx].th_gen = 0;
atomic_thread_fence_rel();
if (enabled)
tk->tk_th[idx] = th;
atomic_store_rel_32(&tk->tk_th[idx].th_gen, svtk->sv_timekeep_gen);
atomic_store_rel_32(&tk->tk_current, idx);
/*
* The ordering of the assignment to tk_enabled relative to
* the update of the vdso_timehands is not important.
*/
tk->tk_enabled = enabled;
}
#ifdef COMPAT_FREEBSD32
static void
timehands_update32(struct vdso_sv_tk *svtk)
{
struct vdso_timehands32 th;
struct vdso_timekeep32 *tk;
uint32_t enabled, idx;
enabled = tc_fill_vdso_timehands32(&th);
th.th_gen = 0;
idx = svtk->sv_timekeep_curr;
if (++idx >= VDSO_TH_NUM)
idx = 0;
svtk->sv_timekeep_curr = idx;
if (++svtk->sv_timekeep_gen == 0)
svtk->sv_timekeep_gen = 1;
tk = (struct vdso_timekeep32 *)(shared_page_mapping +
svtk->sv_timekeep_off);
tk->tk_th[idx].th_gen = 0;
atomic_thread_fence_rel();
if (enabled)
tk->tk_th[idx] = th;
atomic_store_rel_32(&tk->tk_th[idx].th_gen, svtk->sv_timekeep_gen);
atomic_store_rel_32(&tk->tk_current, idx);
tk->tk_enabled = enabled;
}
#endif
/*
 * This is hackish, but the easiest way to avoid creating list structures
 * that need to be iterated over from the hardclock interrupt
* context.
*/
static struct vdso_sv_tk *host_svtk;
#ifdef COMPAT_FREEBSD32
static struct vdso_sv_tk *compat32_svtk;
#endif
void
timekeep_push_vdso(void)
{
if (host_svtk != NULL)
timehands_update(host_svtk);
#ifdef COMPAT_FREEBSD32
if (compat32_svtk != NULL)
timehands_update32(compat32_svtk);
#endif
}
struct vdso_sv_tk *
alloc_sv_tk(void)
{
struct vdso_sv_tk *svtk;
int tk_base;
uint32_t tk_ver;
tk_ver = VDSO_TK_VER_CURR;
svtk = malloc(sizeof(struct vdso_sv_tk), M_TEMP, M_WAITOK | M_ZERO);
tk_base = shared_page_alloc(sizeof(struct vdso_timekeep) +
sizeof(struct vdso_timehands) * VDSO_TH_NUM, 16);
KASSERT(tk_base != -1, ("tk_base -1 for native"));
shared_page_write(tk_base + offsetof(struct vdso_timekeep, tk_ver),
sizeof(uint32_t), &tk_ver);
svtk->sv_timekeep_off = tk_base;
timekeep_push_vdso();
return (svtk);
}
#ifdef COMPAT_FREEBSD32
struct vdso_sv_tk *
alloc_sv_tk_compat32(void)
{
struct vdso_sv_tk *svtk;
int tk_base;
uint32_t tk_ver;
svtk = malloc(sizeof(struct vdso_sv_tk), M_TEMP, M_WAITOK | M_ZERO);
tk_ver = VDSO_TK_VER_CURR;
tk_base = shared_page_alloc(sizeof(struct vdso_timekeep32) +
sizeof(struct vdso_timehands32) * VDSO_TH_NUM, 16);
KASSERT(tk_base != -1, ("tk_base -1 for 32bit"));
shared_page_write(tk_base + offsetof(struct vdso_timekeep32,
tk_ver), sizeof(uint32_t), &tk_ver);
svtk->sv_timekeep_off = tk_base;
timekeep_push_vdso();
return (svtk);
}
#endif
void
exec_sysvec_init(void *param)
{
struct sysentvec *sv;
sv = (struct sysentvec *)param;
if ((sv->sv_flags & SV_SHP) == 0)
return;
sv->sv_shared_page_obj = shared_page_obj;
sv->sv_sigcode_base = sv->sv_shared_page_base +
shared_page_fill(*(sv->sv_szsigcode), 16, sv->sv_sigcode);
if ((sv->sv_flags & SV_ABI_MASK) != SV_ABI_FREEBSD)
return;
if ((sv->sv_flags & SV_TIMEKEEP) != 0) {
#ifdef COMPAT_FREEBSD32
if ((sv->sv_flags & SV_ILP32) != 0) {
KASSERT(compat32_svtk == NULL,
("Compat32 already registered"));
compat32_svtk = alloc_sv_tk_compat32();
sv->sv_timekeep_base = sv->sv_shared_page_base +
compat32_svtk->sv_timekeep_off;
} else {
#endif
KASSERT(host_svtk == NULL, ("Host already registered"));
host_svtk = alloc_sv_tk();
sv->sv_timekeep_base = sv->sv_shared_page_base +
host_svtk->sv_timekeep_off;
#ifdef COMPAT_FREEBSD32
}
#endif
}
}
|
TigerBSD/TigerBSD
|
FreeBSD/sys/kern/kern_sharedpage.c
|
C
|
isc
| 7,826 |
# $FreeBSD$
SUBDIR= libasn1 libgssapi_krb5 libgssapi_ntlm libgssapi_spnego libhdb \
libheimntlm libhx509 libkadm5clnt libkadm5srv libkrb5 \
libroken libsl libvers libkdc libwind libheimbase libheimipcc libheimipcs
SUBDIR+= libkafs5 # requires krb_err.h from libkrb5
SUBDIR_DEPEND_libkafs5= libkrb5
.include <bsd.subdir.mk>
|
TigerBSD/TigerBSD
|
FreeBSD/kerberos5/lib/Makefile
|
Makefile
|
isc
| 330 |
TARGETS = cpcond tagintr
EXTRA_OBJS = ../dmatags.o ../dmasend.o
EE_LIBS = -lg -ldma
COMMON_DIR=../../../common
include $(COMMON_DIR)/common-ee.mk
|
jpd002/ps2autotests
|
tests/dma/dmac/Makefile
|
Makefile
|
isc
| 147 |
var doNothing = function () {}
/**
* The `Base` log defines methods that transports will share.
*/
var Base = module.exports = function (config, defaults) {
var cedar = require('../../cedar')
// A log is a shorthand for `log.log`, among other things.
var log = function () {
log.log.apply(log, arguments)
}
// Don't run `setMethods` until all config properties are set.
var setMethods = doNothing
// Define properties that trigger `setMethods`.
Base.resetters.forEach(function (property) {
var value
Object.defineProperty(log, property, {
get: function () {
return value
},
set: function (newValue) {
value = newValue
setMethods.apply(log)
}
})
})
// Copy `config` properties to the `log`.
Base.decorate(log, config, true)
// Apply default properties.
Base.decorate(log, defaults || Base.defaults)
// Set up logging methods.
Base.setMethods.apply(log)
// Re-run `setMethods` if `resetters` change.
setMethods = Base.setMethods
// Return the fully-decorated log function.
return log
}
/**
* Some properties will reset methods if changed.
*/
Base.resetters = ['level', 'prefixes', 'format', 'showTrace']
/**
* Cedar supports 7 levels of logging.
*/
Base.levels = ['trace', 'debug', 'log', 'info', 'warn', 'error', 'fatal']
/**
* Share defaults between log objects.
*/
Base.defaults = {
// Show all log messages by default.
level: 'trace',
// Stream to `stdout` (using `write`).
stream: process.stdout,
// Don't add any space to JSON.
space: '',
// Stringify with `JSON.stringify`.
stringify: JSON.stringify,
// Join arguments together as an array.
join: function (args) {
var list = []
for (var index = 0, length = args.length; index < length; index++) {
var arg = args[index]
if (arg instanceof Error) {
arg = '"' + (arg.stack || arg.toString()).replace(/\n/, '\\n') + '"'
} else {
arg = JSON.stringify(arg, null, this.space)
}
list.push(arg)
}
return '[' + list.join(',') + ']'
},
// Start messages with a prefix for each log method.
prefixes: {
trace: 'TRACE ',
debug: 'DEBUG ',
log: 'LOG ',
info: 'INFO ',
warn: 'WARN ',
error: 'ERROR ',
fatal: 'FATAL '
},
// Format a log message.
format: function (message, type, prefix) {
return prefix + message + '\n'
}
}
/**
* Decorate an object with the properties of another.
*/
Base.decorate = function (object, defaults, shouldOverwrite) {
object = object || {}
for (var key in defaults) {
if (shouldOverwrite || (typeof object[key] === 'undefined')) {
object[key] = defaults[key]
}
}
return object
}
/**
* Create logging methods based on the configured `level`.
*/
Base.setMethods = function () {
var self = this
var found = false
if ((Base.levels.indexOf(self.level) < 0) && self.level !== 'nothing') {
self.error('Unknown log level: "' + self.level + '".')
} else {
Base.levels.forEach(function (methodName, index) {
if (methodName === self.level) {
found = true
}
var prefix = self.prefixes[methodName] || ''
var format = self.format
// If this log is an Emitter, we can catch and emit errors.
if (self.emit) {
self[methodName] = found ? function () {
var message = self.join(arguments)
message = format.call(self, message, methodName, prefix)
try {
self.stream.write(message)
} catch (e) {
self.emit('error', e)
}
} : doNothing
// Otherwise, they'll just throw.
} else {
self[methodName] = found ? function () {
var message = self.join(arguments)
message = format.call(self, message, methodName, prefix)
self.stream.write(message)
} : doNothing
}
})
// Wrap the trace method with a stack tracer.
if (self.trace !== doNothing) {
var traceMethod = self.trace
self.trace = function () {
var e = new Error('')
Error.captureStackTrace(e, self.trace)
var l = arguments.length
arguments[l] = e.stack.split('\n').splice(2).join('\n')
arguments.length = ++l
traceMethod.apply(self, arguments)
}
}
}
}
|
zerious/cedar
|
lib/transports/base.js
|
JavaScript
|
isc
| 4,332 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace GAPPSF.OKAPI
{
public class SiteInfoNetherlands: SiteInfo
{
public const string STR_INFO = "opencaching.nl";
public SiteInfoNetherlands()
{
ID = "2";
Info = STR_INFO;
OKAPIBaseUrl = "http://www.opencaching.nl/okapi/";
GeocodePrefix = "OB";
}
public override void LoadSettings()
{
Username = Core.ApplicationData.Instance.AccountInfos.GetAccountInfo(GeocodePrefix).AccountName ?? "";
UserID = Core.Settings.Default.OKAPISiteInfoNetherlandsUserID ?? "";
Token = Core.Settings.Default.OKAPISiteInfoNetherlandsToken ?? "";
TokenSecret = Core.Settings.Default.OKAPISiteInfoNetherlandsTokenSecret ?? "";
base.LoadSettings();
}
public override void SaveSettings()
{
Core.ApplicationData.Instance.AccountInfos.GetAccountInfo(GeocodePrefix).AccountName = Username ?? "";
Core.Settings.Default.OKAPISiteInfoNetherlandsUserID = UserID ?? "";
Core.Settings.Default.OKAPISiteInfoNetherlandsToken = Token ?? "";
Core.Settings.Default.OKAPISiteInfoNetherlandsTokenSecret = TokenSecret ?? "";
}
}
}
|
RH-Code/GAPP
|
GAPPSF/OKAPI/SiteInfoNetherlands.cs
|
C#
|
mit
| 1,336 |
import Omni = require('../../lib/omni-sharp-server/omni');
import {Observable, CompositeDisposable} from "rx";
import {setupFeature, restoreBuffers, openEditor} from "../test-helpers";
describe('Run Tests', () => {
setupFeature(['features/run-tests']);
it('adds commands', () => {
var disposable = new CompositeDisposable();
runs(() => {
var commands: any = atom.commands;
expect(commands.registeredCommands['omnisharp-atom:run-all-tests']).toBeTruthy();
expect(commands.registeredCommands['omnisharp-atom:run-fixture-tests']).toBeTruthy();
expect(commands.registeredCommands['omnisharp-atom:run-single-test']).toBeTruthy();
expect(commands.registeredCommands['omnisharp-atom:run-last-test']).toBeTruthy();
disposable.dispose();
});
});
// TODO: Test functionality
});
|
rambocoder/omnisharp-atom
|
spec/features/run-tests-spec.ts
|
TypeScript
|
mit
| 886 |
*{
margin: 0px;
padding: 0px;
}
#introduce{
width: 1000px;
height: 680px;
background-color: #f4f4f4;
margin: auto;
}
h1{
font-size: 2em;
text-align: center;
}
li{
list-style: none;
font-size: 1em;
}
#pic1{
margin-left: 60px;
float: left;
width: 361px;
height: 237px;
}
#pic2{
margin-left: 300px;
float: left;
width: 169px;
height: 237px;
}
#pic-describe{
text-align: center;
font-size: 13px;
}
|
JackyBird/nju_sun
|
WebRoot/css/operating_Guide.css
|
CSS
|
mit
| 419 |
//
using System;
if (double.TryParse(aaa, out var bbb))
{
// ...
}
//
|
general-language-syntax/GLS
|
test/integration/IfStringToDoubleEnd/if string to double end.cs
|
C#
|
mit
| 75 |
{{- define "main" -}}
<h1>{{ .Title | markdownify }}</h1>
<p>
<small class="text-secondary">
{{ $customDateFormat := "January 2, 2006" }}
{{ with .Site.Params.customDateFormat }}{{ $customDateFormat = . }}{{ end }}
{{ .PublishDate.Format $customDateFormat }}{{ if gt .Lastmod .PublishDate }}, updated {{ .Lastmod.Format $customDateFormat }}{{ end }}
</small>
{{ partial "tags" . }}
</p>
{{ .Content }}
{{- end -}}
|
senthilsiva/senthilsiva.github.io
|
themes/minimal-bootstrap-hugo-theme/layouts/post/single.html
|
HTML
|
mit
| 427 |
using System;
using System.Collections.Generic;
using KellermanSoftware.CompareNetObjects;
using KellermanSoftware.CompareNetObjects.TypeComparers;
using ProtoBuf;
namespace Abc.Zebus.Testing.Comparison
{
internal static class ComparisonExtensions
{
public static bool DeepCompare<T>(this T firstObj, T secondObj, params string[] elementsToIgnore)
{
var comparer = CreateComparer();
comparer.Config.MembersToIgnore.AddRange(elementsToIgnore);
return comparer.Compare(firstObj, secondObj).AreEqual;
}
public static CompareLogic CreateComparer()
{
return new CompareLogic
{
Config =
{
CompareStaticProperties = false,
CompareStaticFields = false,
CustomComparers =
{
// TODO : Is this still used?
new EquatableComparer()
},
AttributesToIgnore = new List<Type> { typeof(ProtoIgnoreAttribute) },
}
};
}
private class EquatableComparer : BaseTypeComparer
{
public EquatableComparer()
: base(RootComparerFactory.GetRootComparer())
{
}
public override bool IsTypeMatch(Type type1, Type type2)
{
if (type1 != type2)
return false;
return typeof(IEquatable<>).MakeGenericType(type1).IsAssignableFrom(type1);
}
public override void CompareType(CompareParms parms)
{
if (!Equals(parms.Object1, parms.Object2))
AddDifference(parms);
}
}
}
}
|
biarne-a/Zebus
|
src/Abc.Zebus.Testing/Comparison/ComparisonExtensions.cs
|
C#
|
mit
| 1,822 |
// Generated on 12/11/2014 19:01:22
using System;
using System.Collections.Generic;
using System.Linq;
using BlueSheep.Common.Protocol.Types;
using BlueSheep.Common.IO;
using BlueSheep.Engine.Types;
namespace BlueSheep.Common.Protocol.Messages
{
public class SequenceNumberMessage : Message
{
public new const uint ID =6317;
public override uint ProtocolID
{
get { return ID; }
}
public ushort number;
public SequenceNumberMessage()
{
}
public SequenceNumberMessage(ushort number)
{
this.number = number;
}
public override void Serialize(BigEndianWriter writer)
{
writer.WriteUShort(number);
}
public override void Deserialize(BigEndianReader reader)
{
number = reader.ReadUShort();
if (number < 0 || number > 65535)
throw new Exception("Forbidden value on number = " + number + ", it doesn't respect the following condition : number < 0 || number > 65535");
}
}
}
|
Sadikk/BlueSheep
|
BlueSheep/Common/Protocol/messages/game/basic/SequenceNumberMessage.cs
|
C#
|
mit
| 1,158 |
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
class Unittest extends CI_Controller {
function __construct() {
parent::__construct ();
$this->load->model('UnitTest_model');
$this->load->library('unit_test');
}
function index(){
echo "test";
}
function debate(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$debate_test_list = $this->UnitTest_model->debate_list_test($offset, $limit);
for($i = 0 ; $i < count($debate_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($debate_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($debate_test_list[$i]->title, 'is_string', 'title_type');
$this->unit->run($debate_test_list[$i]->content, 'is_string', 'content_type');
$this->unit->run($debate_test_list[$i]->reg_id, 'is_numeric', 'reg_id_type');
$this->unit->run($debate_test_list[$i]->del_st, 'is_numeric', 'del_st_type');
}
echo $this->unit->report();
}
function debate_reply(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$debate_reply_test_list = $this->UnitTest_model->debate_reply_list_test($offset, $limit);
for($i = 0 ; $i < count($debate_reply_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($debate_reply_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($debate_reply_test_list[$i]->debate_id, 'is_numeric', 'debate_id_type');
$this->unit->run($debate_reply_test_list[$i]->content, 'is_string', 'content_type');
$this->unit->run($debate_reply_test_list[$i]->reg_id, 'is_numeric', 'reg_id_type');
$this->unit->run($debate_reply_test_list[$i]->del_st, 'is_numeric', 'del_st_type');
}
echo $this->unit->report();
}
function debate_back(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$debate_back_test_list = $this->UnitTest_model->debate_back_list_test($offset, $limit);
for($i = 0 ; $i < count($debate_back_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($debate_back_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($debate_back_test_list[$i]->debate_id, 'is_numeric', 'debate_id_type');
$this->unit->run($debate_back_test_list[$i]->seq, 'is_numeric', 'seq_type');
$this->unit->run($debate_back_test_list[$i]->title, 'is_string', 'title_type');
$this->unit->run($debate_back_test_list[$i]->content, 'is_string', 'content_type');
}
echo $this->unit->report();
}
function board(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$board_test_list = $this->UnitTest_model->board_list_test($offset, $limit);
for($i = 0 ; $i < count($board_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($board_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($board_test_list[$i]->title, 'is_string', 'title_type');
$this->unit->run($board_test_list[$i]->content, 'is_string', 'content_type');
$this->unit->run($board_test_list[$i]->reg_id, 'is_numeric', 'reg_id_type');
$this->unit->run($board_test_list[$i]->del_st, 'is_numeric', 'del_st_type');
}
echo $this->unit->report();
}
function board_reply(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$board_reply_test_list = $this->UnitTest_model->board_reply_list_test($offset, $limit);
for($i = 0 ; $i < count($board_reply_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($board_reply_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($board_reply_test_list[$i]->board_id, 'is_numeric', 'board_id_type');
$this->unit->run($board_reply_test_list[$i]->content, 'is_string', 'content_type');
$this->unit->run($board_reply_test_list[$i]->reg_id, 'is_numeric', 'reg_id_type');
$this->unit->run($board_reply_test_list[$i]->del_st, 'is_numeric', 'del_st_type');
}
echo $this->unit->report();
}
function law(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$law_test_list = $this->UnitTest_model->law_list_test($offset, $limit);
for($i = 0 ; $i < count($law_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($law_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($law_test_list[$i]->d1, 'is_null', 'd1_type');
$this->unit->run($law_test_list[$i]->d2, 'is_null', 'd2_type');
$this->unit->run($law_test_list[$i]->d3, 'is_null', 'd3_type');
$this->unit->run($law_test_list[$i]->d4, 'is_null', 'd4_type');
$this->unit->run($law_test_list[$i]->d5, 'is_null', 'd5_type');
$this->unit->run($law_test_list[$i]->d6, 'is_null', 'd6_type');
$this->unit->run($law_test_list[$i]->d7, 'is_null', 'd7_type');
$this->unit->run($law_test_list[$i]->d8, 'is_null', 'd8_type');
$this->unit->run($law_test_list[$i]->reg_id, 'is_numeric', 'reg_id_type');
}
echo $this->unit->report();
}
function law_model(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$law_model_test_list = $this->UnitTest_model->law_model_list_test($offset, $limit);
for($i = 0 ; $i < count($law_model_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($law_model_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($law_model_test_list[$i]->title, 'is_string', 'title_type');
$this->unit->run($law_model_test_list[$i]->content, 'is_string', 'content_type');
$this->unit->run($law_model_test_list[$i]->reg_id, 'is_numeric', 'reg_id_type');
$this->unit->run($law_model_test_list[$i]->del_st, 'is_numeric', 'del_st_type');
}
echo $this->unit->report();
}
function log(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$log_test_list = $this->UnitTest_model->log_list_test($offset, $limit);
for($i = 0 ; $i < count($log_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($log_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($log_test_list[$i]->code, 'is_numeric', 'code_type');
$this->unit->run($log_test_list[$i]->content_id, 'is_numeric', 'content_id_type');
$this->unit->run($log_test_list[$i]->reg_id, 'is_numeric', 'reg_id_type');
}
echo $this->unit->report();
}
function user(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$user_test_list = $this->UnitTest_model->user_list_test($offset, $limit);
for($i = 0 ; $i < count($user_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($user_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($user_test_list[$i]->email, 'is_string', 'email_type');
$this->unit->run($user_test_list[$i]->auth_code, 'is_string', 'auth_code_type');
}
echo $this->unit->report();
}
function user_info(){
$offset = $this->input->get("offset");
$limit = $this->input->get("limit");
$user_info_test_list = $this->UnitTest_model->user_info_list_test($offset, $limit);
for($i = 0 ; $i < count($user_info_test_list); $i ++){
$this->unit->use_strict(TRUE);
$this->unit->run($user_info_test_list[$i]->id, 'is_numeric', 'id_type');
$this->unit->run($user_info_test_list[$i]->user_id, 'is_numeric', 'user_id_type');
$this->unit->run($user_info_test_list[$i]->user_code, 'is_numeric', 'user_code_type');
$this->unit->run($user_info_test_list[$i]->acc_st, 'is_numeric', 'acc_st_type');
}
echo $this->unit->report();
}
}
|
dvmoomoodv/CORK-server
|
vendor/codeigniter/framework/application/controllers/Unittest.php
|
PHP
|
mit
| 7,831 |
# modern-django
Modern Django: A Guide on How to Deploy Django-based Web Applications in 2017
|
safouh94/modern-django
|
README.md
|
Markdown
|
mit
| 94 |
#include <iostream>
using namespace std;
#include <omp.h>
#define SIZE 8
int main(void){
int x[SIZE];
int sum=0;
for(int i=0;i<SIZE;i++){
x[i]=i;
}
#pragma omp parallel for reduction (+:sum)
for(int i=0;i<SIZE;i++){
sum+=x[i];
}
cout<<sum<<std::endl;
return 0;
}
|
wasit7/cs426
|
lectures/week02_omp2/week022_forReduction/for.cpp
|
C++
|
mit
| 300 |
- Validate only the value itself; don't care where the value came from
- Return `undefined` when validation succeeds, and a prompt message when it fails
- Validation happens at two moments: while the user is typing (on input) and on submit
- Handling a validation failure: show a prompt and give the field focus
- Failure prompts: decide when to prompt and how to prompt
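
A minimal sketch of a validator that follows these notes (illustrative only — the function names, the field names, and the `alert`-based prompt are assumptions, not code from this repo):

```js
// Validate only the value; don't care where it came from.
// Return undefined on success, a message string on failure.
function required(value) {
  return value != null && value !== '' ? undefined : 'This field is required'
}

// The same validators can run on input (e.g. blur/keyup) or on submit.
// On failure: prompt the user and focus the offending field.
function validateOnSubmit(form, validators) {
  for (var name in validators) {
    var message = validators[name](form.elements[name].value)
    if (message !== undefined) {
      alert(message)               // prompt method/timing is the app's choice; inline tips work too
      form.elements[name].focus()  // failure handling: move focus back to the field
      return false
    }
  }
  return true
}

// Usage sketch: block submission until every field validates.
// document.forms[0].onsubmit = function () {
//   return validateOnSubmit(this, { email: required })
// }
```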
|
jserz/feget
|
专题/表单验证/记录.md
|
Markdown
|
mit
| 262 |
from .image import Image
from .product_category import ProductCategory
from .supplier import Supplier, PaymentMethod
from .product import Product
from .product import ProductImage
from .enum_values import EnumValues
from .related_values import RelatedValues
from .customer import Customer
from .expense import Expense
from .incoming import Incoming
from .shipping import Shipping, ShippingLine
from .receiving import Receiving, ReceivingLine
from .inventory_transaction import InventoryTransaction, InventoryTransactionLine
from .purchase_order import PurchaseOrder, PurchaseOrderLine
from .sales_order import SalesOrder, SalesOrderLine
from .user import User
from .role import Role, roles_users
from .organization import Organization
from .inventory_in_out_link import InventoryInOutLink
from .aspects import update_menemonic
from .product_inventory import ProductInventory
|
betterlife/psi
|
psi/app/models/__init__.py
|
Python
|
mit
| 875 |
import type {ResponseType} from "./base.type";
function parseJSON(response: ResponseType): Object {
return response.json();
}
export {parseJSON};
|
lingui/everest
|
src/parsers.js
|
JavaScript
|
mit
| 150 |
// This code contains NVIDIA Confidential Information and is disclosed to you
// under a form of NVIDIA software license agreement provided separately to you.
//
// Notice
// NVIDIA Corporation and its licensors retain all intellectual property and
// proprietary rights in and to this software and related documentation and
// any modifications thereto. Any use, reproduction, disclosure, or
// distribution of this software and related documentation without an express
// license agreement from NVIDIA Corporation is strictly prohibited.
//
// ALL NVIDIA DESIGN SPECIFICATIONS, CODE ARE PROVIDED "AS IS.". NVIDIA MAKES
// NO WARRANTIES, EXPRESSED, IMPLIED, STATUTORY, OR OTHERWISE WITH RESPECT TO
// THE MATERIALS, AND EXPRESSLY DISCLAIMS ALL IMPLIED WARRANTIES OF NONINFRINGEMENT,
// MERCHANTABILITY, AND FITNESS FOR A PARTICULAR PURPOSE.
//
// Information and code furnished is believed to be accurate and reliable.
// However, NVIDIA Corporation assumes no responsibility for the consequences of use of such
// information or for any infringement of patents or other rights of third parties that may
// result from its use. No license is granted by implication or otherwise under any patent
// or patent rights of NVIDIA Corporation. Details are subject to change without notice.
// This code supersedes and replaces all information previously supplied.
// NVIDIA Corporation products are not authorized for use as critical
// components in life support devices or systems without express written approval of
// NVIDIA Corporation.
//
// Copyright (c) 2008-2013 NVIDIA Corporation. All rights reserved.
#include <RendererTextureDesc.h>
using namespace SampleRenderer;
RendererTextureDesc::RendererTextureDesc(void)
{
format = RendererTexture::NUM_FORMATS;
filter = RendererTexture::FILTER_LINEAR;
addressingU = RendererTexture::ADDRESSING_WRAP;
addressingV = RendererTexture::ADDRESSING_WRAP;
addressingW = RendererTexture::ADDRESSING_WRAP;
width = 0;
height = 0;
depth = 1;
numLevels = 0;
renderTarget = false;
data = NULL;
}
bool RendererTextureDesc::isValid(void) const
{
bool ok = true;
if(format >= RendererTexture2D::NUM_FORMATS) ok = false;
if(filter >= RendererTexture2D::NUM_FILTERS) ok = false;
if(addressingU >= RendererTexture2D::NUM_ADDRESSING) ok = false;
if(addressingV >= RendererTexture2D::NUM_ADDRESSING) ok = false;
if(width <= 0 || height <= 0 || depth <= 0) ok = false; // TODO: check for power of two.
if(numLevels <= 0) ok = false;
if(renderTarget)
{
if(depth > 1) ok = false;
if(format == RendererTexture2D::FORMAT_DXT1) ok = false;
if(format == RendererTexture2D::FORMAT_DXT3) ok = false;
if(format == RendererTexture2D::FORMAT_DXT5) ok = false;
}
return ok;
}
|
jjuiddong/KarlSims
|
SampleFramework/renderer/src/RendererTextureDesc.cpp
|
C++
|
mit
| 2,787 |
var request = require('request'),
mongoose = require('mongoose'),
util = require('util'),
url = require('url'),
helpers = require('./helpers'),
sync = require('./sync')
// turn off request pooling
request.defaults({ agent:false })
// cache elasticsearch url options for elmongo.search() to use
var elasticUrlOptions = null
/**
* Attach mongoose plugin for elasticsearch indexing
*
* @param {Object} schema mongoose schema
* @param {Object} options elasticsearch options object. Keys: host, port, index, type
*/
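// Illustrative usage (schema/model names are placeholders, and the query shape
// passed to search() is an assumption, not defined in this file):
//
//   var CatSchema = new mongoose.Schema({ name: String })
//   CatSchema.plugin(elmongo, { host: 'localhost', port: 9200 })
//   var Cat = mongoose.model('Cat', CatSchema)
//   Cat.sync(function (err) { /* existing docs indexed */ })
//   Cat.search({ query: 'simba' }, function (err, results) { /* ... */ })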
module.exports = elmongo = function (schema, options) {
// attach methods to schema
schema.methods.index = index
schema.methods.unindex = unindex
schema.statics.sync = function (cb) {
options = helpers.mergeModelOptions(options, this)
return sync.call(this, schema, options, cb)
}
schema.statics.search = function (searchOpts, cb) {
options = helpers.mergeModelOptions(options, this)
var searchUri = helpers.makeTypeUri(options) + '/_search?search_type=dfs_query_then_fetch&preference=_primary_first'
return helpers.doSearchAndNormalizeResults(searchUri, searchOpts, cb)
}
// attach mongoose middleware hooks
schema.post('save', function () {
options = helpers.mergeModelOptions(options, this)
this.index(options)
})
schema.post('remove', function () {
options = helpers.mergeModelOptions(options, this)
this.unindex(options)
})
}
/**
* Search across multiple collections. Same usage as model search, but with an extra key on `searchOpts` - `collections`
* @param {Object} searchOpts
* @param {Function} cb
*/
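// Illustrative call (collection names are placeholders; apart from the documented
// `collections` key, the search options shape is assumed):
//
//   elmongo.search({ collections: [ 'cats', 'dogs' ], query: 'simba' }, callback)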
elmongo.search = function (searchOpts, cb) {
// merge elasticsearch url config options
elasticUrlOptions = helpers.mergeOptions(elasticUrlOptions)
// determine collections to search on
var collections = searchOpts.collections;
if (elasticUrlOptions.prefix) {
// prefix was specified - namespace the index names to use the prefix for each collection's index
if (searchOpts.collections && searchOpts.collections.length) {
// collections were specified - prepend the prefix on each collection name
collections = collections.map(function (collection) {
return elasticUrlOptions.prefix + '-' + collection
})
} else {
// no collections specified, but prefix specified - use wildcard index with prefix
collections = [ elasticUrlOptions.prefix + '*' ]
}
} else {
// no prefix used
// if collections specified, just use their names without the prefix
if (!collections) {
// no collections were specified so use _all (searches all collections), without prefix
            collections = searchOpts.collections = [ '_all' ]
}
}
var searchUri = helpers.makeDomainUri(elasticUrlOptions) + '/' + collections.join(',') + '/_search?search_type=dfs_query_then_fetch&preference=_primary_first'
return helpers.doSearchAndNormalizeResults(searchUri, searchOpts, cb)
}
/**
* Configure the Elasticsearch url options for `elmongo.search()`.
*
* @param {Object} options - keys: host, port, prefix (optional)
*/
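// Illustrative call (host/port/prefix values are placeholders):
//
//   elmongo.search.config({ host: 'localhost', port: 9200, prefix: 'myapp' })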
elmongo.search.config = function (options) {
// only overwrite `options` values that are being specified in this call to `config`
if (elasticUrlOptions) {
Object
.keys(elasticUrlOptions)
.forEach(function (key) {
elasticUrlOptions[key] = options[key] || elasticUrlOptions[key]
})
}
// normalize the `options` object
elasticUrlOptions = helpers.mergeOptions(options)
}
/**
* Index a document in elasticsearch (create if not existing)
*
* @param {Object} options elasticsearch options object. Keys: host, port, index, type
*/
function index (options) {
var self = this
// strip mongoose-added functions, depopulate any populated fields, and serialize the doc
var esearchDoc = helpers.serializeModel(this)
var indexUri = helpers.makeDocumentUri(options, self)
var reqOpts = {
method: 'PUT',
url: indexUri,
body: JSON.stringify(esearchDoc)
}
// console.log('index:', indexUri)
helpers.backOffRequest(reqOpts, function (err, res, body) {
if (err) {
var error = new Error('Elasticsearch document indexing error: '+util.inspect(err, true, 10, true))
error.details = err
self.emit('error', error)
return
}
self.emit('elmongo-indexed', body)
})
}
/**
* Remove a document from elasticsearch
*
* @param {Object} options elasticsearch options object. Keys: host, port, index, type
*/
function unindex (options) {
var self = this
var unindexUri = helpers.makeDocumentUri(options, self)
// console.log('unindex:', unindexUri)
var reqOpts = {
method: 'DELETE',
url: unindexUri
}
helpers.backOffRequest(reqOpts, function (err, res, body) {
if (err) {
var error = new Error('Elasticsearch document index deletion error: '+util.inspect(err, true, 10, true))
error.details = err
self.emit('error', error)
return
}
self.emit('elmongo-unindexed', body)
})
}
|
regini/inSquare
|
inSquareBackend/cloud.insquare/node_modules/elmongo/lib/elmongo.js
|
JavaScript
|
mit
| 5,351 |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>local::basic_endpoint::operator!=</title>
<link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../../boost_asio.html" title="Boost.Asio">
<link rel="up" href="../local__basic_endpoint.html" title="local::basic_endpoint">
<link rel="prev" href="data_type.html" title="local::basic_endpoint::data_type">
<link rel="next" href="operator_lt_.html" title="local::basic_endpoint::operator<">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td>
<td align="center"><a href="../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="data_type.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../local__basic_endpoint.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="operator_lt_.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h4 class="title">
<a name="boost_asio.reference.local__basic_endpoint.operator_not__eq_"></a><a class="link" href="operator_not__eq_.html" title="local::basic_endpoint::operator!=">local::basic_endpoint::operator!=</a>
</h4></div></div></div>
<p>
<a class="indexterm" name="idp180210864"></a>
Compare two endpoints for inequality.
</p>
<pre class="programlisting"><span class="keyword">friend</span> <span class="keyword">bool</span> <span class="keyword">operator</span><span class="special">!=(</span>
<span class="keyword">const</span> <span class="identifier">basic_endpoint</span><span class="special"><</span> <span class="identifier">Protocol</span> <span class="special">></span> <span class="special">&</span> <span class="identifier">e1</span><span class="special">,</span>
<span class="keyword">const</span> <span class="identifier">basic_endpoint</span><span class="special"><</span> <span class="identifier">Protocol</span> <span class="special">></span> <span class="special">&</span> <span class="identifier">e2</span><span class="special">);</span>
</pre>
<h6>
<a name="boost_asio.reference.local__basic_endpoint.operator_not__eq_.h0"></a>
<span class="phrase"><a name="boost_asio.reference.local__basic_endpoint.operator_not__eq_.requirements"></a></span><a class="link" href="operator_not__eq_.html#boost_asio.reference.local__basic_endpoint.operator_not__eq_.requirements">Requirements</a>
</h6>
<p>
<span class="emphasis"><em>Header: </em></span><code class="literal">boost/asio/local/basic_endpoint.hpp</code>
</p>
<p>
<span class="emphasis"><em>Convenience header: </em></span><code class="literal">boost/asio.hpp</code>
</p>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2003-2014 Christopher M. Kohlhoff<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="data_type.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../local__basic_endpoint.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="operator_lt_.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
|
rkq/cxxexp
|
third-party/src/boost_1_56_0/doc/html/boost_asio/reference/local__basic_endpoint/operator_not__eq_.html
|
HTML
|
mit
| 4,511 |
/**
* Copyright 2015 Telerik AD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function(f, define){
define([], f);
})(function(){
(function( window, undefined ) {
var kendo = window.kendo || (window.kendo = { cultures: {} });
kendo.cultures["qut"] = {
name: "qut",
numberFormat: {
pattern: ["-n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
percent: {
pattern: ["-n %","n %"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "%"
},
currency: {
name: "",
abbr: "",
pattern: ["($n)","$n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "Q"
}
},
calendars: {
standard: {
days: {
names: ["juq\u0027ij","kaq\u0027ij","oxq\u0027ij","kajq\u0027ij","joq\u0027ij","waqq\u0027ij","wuqq\u0027ij"],
namesAbbr: ["juq\u0027","kaq\u0027","oxq\u0027","kajq\u0027","joq\u0027","waqq\u0027","wuqq\u0027"],
namesShort: ["ju","ka","ox","kj","jo","wa","wu"]
},
months: {
names: ["nab\u0027e ik\u0027","ukab\u0027 ik\u0027","urox ik\u0027","ukaj ik\u0027","uro ik\u0027","uwaq ik\u0027","uwuq ik\u0027","uwajxaq ik\u0027","ub\u0027elej ik\u0027","ulaj ik\u0027","ujulaj ik\u0027","ukab\u0027laj ik\u0027"],
namesAbbr: ["nab\u0027e","ukab\u0027","urox","ukaj","uro","uwaq","uwuq","uwajxaq","ub\u0027elej","ulaj","ujulaj","ukab\u0027laj"]
},
AM: ["a.m.","a.m.","A.M."],
PM: ["p.m.","p.m.","P.M."],
patterns: {
d: "dd/MM/yyyy",
D: "dddd, dd' rech 'MMMM' rech 'yyyy",
F: "dddd, dd' rech 'MMMM' rech 'yyyy h:mm:ss tt",
g: "dd/MM/yyyy h:mm tt",
G: "dd/MM/yyyy h:mm:ss tt",
m: "d' rech 'MMMM",
M: "d' rech 'MMMM",
s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
t: "h:mm tt",
T: "h:mm:ss tt",
u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
y: "MMMM' rech 'yyyy",
Y: "MMMM' rech 'yyyy"
},
"/": "/",
":": ":",
firstDay: 1
}
}
}
})(this);
return window.kendo;
}, typeof define == 'function' && define.amd ? define : function(_, f){ f(); });
|
assunluis80/Web-Starter-Template
|
assets/scripts/vendors/kendo_ui/cultures/kendo.culture.qut.js
|
JavaScript
|
mit
| 3,281 |
#!/bin/bash
export LDFLAGS="-lstdc++fs"
mkdir -p $PREFIX/lib
./configure --prefix $PREFIX
make
make install
|
blankenberg/bioconda-recipes
|
recipes/libmaus2/build.sh
|
Shell
|
mit
| 109 |
using System;
using System.Collections.Generic;
using System.Text;
using FlatRedBall;
using FlatRedBall.Gui;
using FlatRedBall.AI.Pathfinding;
#if FRB_MDX
using Color = System.Drawing.Color;
#else
using Color = Microsoft.Xna.Framework.Graphics.Color;
#endif
using CameraPropertyGrid = EditorObjects.Gui.CameraPropertyGrid;
using EditorObjects.Gui;
using EditorObjects;
namespace AIEditor.Gui
{
public static class GuiData
{
#region Fields
static int mFramesSinceLastExpensiveGuiUpdate = 0;
static Menu mMenuStrip;
static CameraPropertyGrid mCameraPropertyGrid;
static NodeNetworkPropertyGrid mNodeNetworkPropertyGrid;
static ToolsWindow mToolsWindow;
static CommandDisplay mCommandDisplay;
static ScenePropertyGrid mScenePropertyGrid;
static ShapeCollectionPropertyGrid mShapeCollectionPropertyGrid;
public static EditorPropertiesGrid mEditorPropertiesGrid;
#endregion
#region Properties
public static CameraPropertyGrid CameraPropertyGrid
{
get { return mCameraPropertyGrid; }
}
public static CommandDisplay CommandDisplay
{
get { return mCommandDisplay; }
}
public static EditorPropertiesGrid EditorPropertiesGrid
{
get { return mEditorPropertiesGrid; }
}
public static NodeNetworkPropertyGrid NodeNetworkPropertyGrid
{
get { return mNodeNetworkPropertyGrid; }
set { mNodeNetworkPropertyGrid = value; }
}
public static ScenePropertyGrid ScenePropertyGrid
{
get { return mScenePropertyGrid; }
}
public static ShapeCollectionPropertyGrid ShapeCollectionPropertyGrid
{
get { return mShapeCollectionPropertyGrid; }
}
public static ToolsWindow ToolsWindow
{
get { return mToolsWindow; }
}
#endregion
#region Events
private static void CreateColorPropertyGrid(Window callingWindow)
{
((PropertyGrid<Color>)callingWindow).ExcludeAllMembers();
((PropertyGrid<Color>)callingWindow).IncludeMember("A");
((PropertyGrid<Color>)callingWindow).IncludeMember("R");
((PropertyGrid<Color>)callingWindow).IncludeMember("G");
((PropertyGrid<Color>)callingWindow).IncludeMember("B");
callingWindow.Y = 40;
}
private static void CreatePositionedNodePropertyGrid(Window callingWindow)
{
PropertyGrid<PositionedNode> asPropertyGrid = callingWindow as PropertyGrid<PositionedNode>;
asPropertyGrid.ExcludeMember("CostToGetHere");
asPropertyGrid.ExcludeMember("Links");
asPropertyGrid.ExcludeMember("X");
asPropertyGrid.ExcludeMember("Y");
asPropertyGrid.ExcludeMember("Z");
asPropertyGrid.Name = "Positioned Node";
}
#endregion
#region Methods
#region Public Methods
public static void Initialize()
{
mMenuStrip = new Menu();
mToolsWindow = new ToolsWindow();
CreatePropertyGrids();
mCommandDisplay = new CommandDisplay();
CreateListDisplayWindows();
}
public static void Update()
{
if (EditorData.Scene != mScenePropertyGrid.SelectedObject)
{
mScenePropertyGrid.SelectedObject = EditorData.Scene;
}
mScenePropertyGrid.UpdateDisplayedProperties();
mNodeNetworkPropertyGrid.Update();
mCameraPropertyGrid.UpdateDisplayedProperties();
// This can be slow. We can speed it up by only doing it every X frames
const int updateEveryXFrames = 30;
mFramesSinceLastExpensiveGuiUpdate++;
if (mFramesSinceLastExpensiveGuiUpdate >= updateEveryXFrames)
{
mNodeNetworkPropertyGrid.UpdateDisplayedProperties();
mFramesSinceLastExpensiveGuiUpdate = 0;
}
#region Update the ShapeCollection PropertyGrid
if (mShapeCollectionPropertyGrid.Visible)
{
if (mShapeCollectionPropertyGrid.SelectedObject != EditorData.ShapeCollection)
{
mShapeCollectionPropertyGrid.SelectedObject = EditorData.ShapeCollection;
}
mShapeCollectionPropertyGrid.UpdateDisplayedProperties();
}
#endregion
}
#endregion
#region Private Methods
private static void CreateListDisplayWindows()
{
}
private static void CreatePropertyGrids()
{
            #region CameraPropertyGrid
mCameraPropertyGrid = new CameraPropertyGrid(GuiManager.Cursor);
GuiManager.AddWindow(mCameraPropertyGrid);
mCameraPropertyGrid.SelectedObject = SpriteManager.Camera;
mCameraPropertyGrid.X = mCameraPropertyGrid.ScaleX;
mCameraPropertyGrid.Y = 40;
mCameraPropertyGrid.HasCloseButton = true;
mCameraPropertyGrid.UndoInstructions =
UndoManager.Instructions;
#endregion
#region NodeNetwork PropertyGrid
mNodeNetworkPropertyGrid = new NodeNetworkPropertyGrid();
mNodeNetworkPropertyGrid.SelectedObject = EditorData.NodeNetwork;
mNodeNetworkPropertyGrid.X = mNodeNetworkPropertyGrid.ScaleX;
mNodeNetworkPropertyGrid.Y = 61;
mNodeNetworkPropertyGrid.HasCloseButton = true;
mNodeNetworkPropertyGrid.UndoInstructions =
UndoManager.Instructions;
#endregion
#region ScenePropertyGrid
mScenePropertyGrid = new ScenePropertyGrid(GuiManager.Cursor);
GuiManager.AddWindow(mScenePropertyGrid);
mScenePropertyGrid.X = mScenePropertyGrid.ScaleX;
mScenePropertyGrid.Y = 75.7f;
mScenePropertyGrid.ShowPropertyGridOnStrongSelect = true;
mScenePropertyGrid.HasCloseButton = true;
mScenePropertyGrid.Visible = false;
mScenePropertyGrid.UndoInstructions = UndoManager.Instructions;
#endregion
#region ShapeCollectionPropertyGrid
mShapeCollectionPropertyGrid = new ShapeCollectionPropertyGrid(GuiManager.Cursor);
GuiManager.AddWindow(mShapeCollectionPropertyGrid);
mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectAxisAlignedCube = true;
mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectAxisAlignedRectangle = true;
mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectCircle = true;
mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectPolygon = true;
mShapeCollectionPropertyGrid.ShowPropertyGridOnStrongSelectSphere = true;
mShapeCollectionPropertyGrid.HasCloseButton = true;
mShapeCollectionPropertyGrid.Visible = false;
mShapeCollectionPropertyGrid.UndoInstructions = UndoManager.Instructions;
#endregion
PropertyGrid.SetNewWindowEvent<FlatRedBall.AI.Pathfinding.PositionedNode>(CreatePositionedNodePropertyGrid);
PropertyGrid.SetNewWindowEvent<Color>(CreateColorPropertyGrid);
#region EditorPropertiesGrid
mEditorPropertiesGrid = new EditorPropertiesGrid();
mEditorPropertiesGrid.Visible = false;
#endregion
}
#endregion
#endregion
}
}
|
GorillaOne/FlatRedBall
|
FRBDK/AIEditor/AIEditor/AIEditor/Gui/GuiData.cs
|
C#
|
mit
| 7,748 |
// flow-typed signature: 573c576fe34eb3c3c65dd7a9c90a46d2
// flow-typed version: b43dff3e0e/http-errors_v1.x.x/flow_>=v0.25.x
declare module 'http-errors' {
declare class SpecialHttpError extends HttpError {
constructor(): SpecialHttpError;
}
declare class HttpError extends Error {
expose: bool;
message: string;
status: number;
statusCode: number;
}
declare module.exports: {
(status?: number, message?: string, props?: Object): HttpError;
HttpError: typeof HttpError;
BadRequest: typeof SpecialHttpError;
Unauthorized: typeof SpecialHttpError;
PaymentRequired: typeof SpecialHttpError;
Forbidden: typeof SpecialHttpError;
NotFound: typeof SpecialHttpError;
MethodNotAllowed: typeof SpecialHttpError;
NotAcceptable: typeof SpecialHttpError;
ProxyAuthenticationRequired: typeof SpecialHttpError;
RequestTimeout: typeof SpecialHttpError;
Conflict: typeof SpecialHttpError;
Gone: typeof SpecialHttpError;
LengthRequired: typeof SpecialHttpError;
PreconditionFailed: typeof SpecialHttpError;
PayloadTooLarge: typeof SpecialHttpError;
URITooLong: typeof SpecialHttpError;
UnsupportedMediaType: typeof SpecialHttpError;
RangeNotStatisfiable: typeof SpecialHttpError;
ExpectationFailed: typeof SpecialHttpError;
ImATeapot: typeof SpecialHttpError;
MisdirectedRequest: typeof SpecialHttpError;
UnprocessableEntity: typeof SpecialHttpError;
Locked: typeof SpecialHttpError;
FailedDependency: typeof SpecialHttpError;
UnorderedCollection: typeof SpecialHttpError;
UpgradeRequired: typeof SpecialHttpError;
PreconditionRequired: typeof SpecialHttpError;
TooManyRequests: typeof SpecialHttpError;
RequestHeaderFieldsTooLarge: typeof SpecialHttpError;
UnavailableForLegalReasons: typeof SpecialHttpError;
InternalServerError: typeof SpecialHttpError;
NotImplemented: typeof SpecialHttpError;
BadGateway: typeof SpecialHttpError;
ServiceUnavailable: typeof SpecialHttpError;
GatewayTimeout: typeof SpecialHttpError;
HTTPVersionNotSupported: typeof SpecialHttpError;
VariantAlsoNegotiates: typeof SpecialHttpError;
InsufficientStorage: typeof SpecialHttpError;
LoopDetected: typeof SpecialHttpError;
BandwidthLimitExceeded: typeof SpecialHttpError;
NotExtended: typeof SpecialHttpError;
NetworkAuthenticationRequired: typeof SpecialHttpError;
}
}
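// Usage sketch (illustrative only; not part of the generated libdef above):
//   const createError = require('http-errors');
//   throw createError(404, 'resource not found');   // -> NotFound HttpError
//   throw new createError.Unauthorized();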
|
conveyal/scenario-editor
|
flow-typed/npm/http-errors_v1.x.x.js
|
JavaScript
|
mit
| 2,439 |
---
layout: nav_menu_item
title: Sales/Billing Document Integration
date: 2016-03-07 06:18
author: jeremy.buller
comments: true
categories: [AvaTax 16 Certification navigation]
---
|
anyarms/developer-dot
|
_drafts/deprecated/nav items/2016-03-07-salesbilling-document-integration.md
|
Markdown
|
mit
| 191 |
/*!
* Start Bootstrap - New Age v3.3.7 (http://startbootstrap.com/template-overviews/new-age)
* Copyright 2013-2016 Start Bootstrap
* Licensed under MIT (https://github.com/BlackrockDigital/startbootstrap/blob/gh-pages/LICENSE)
*/
.heading-font {
font-family: 'Catamaran', 'Helvetica', 'Arial', 'sans-serif';
font-weight: 200;
letter-spacing: 1px;
}
.body-font {
font-family: 'Muli', 'Helvetica', 'Arial', 'sans-serif';
}
.alt-font {
font-family: 'Lato', 'Helvetica', 'Arial', 'sans-serif';
text-transform: uppercase;
letter-spacing: 2px;
}
html,
body {
height: 100%;
width: 100%;
}
body {
font-family: 'Muli', 'Helvetica', 'Arial', 'sans-serif';
}
a {
-webkit-transition: all 0.35s;
-moz-transition: all 0.35s;
transition: all 0.35s;
color: #fdcc52;
}
a:hover,
a:focus {
color: #fcbd20;
}
hr {
max-width: 100px;
margin: 25px auto 0;
border-width: 1px;
border-color: rgba(34, 34, 34, 0.1);
}
hr.light {
border-color: white;
}
h1,
h2,
h3,
h4,
h5,
h6 {
font-family: 'Catamaran', 'Helvetica', 'Arial', 'sans-serif';
font-weight: 200;
letter-spacing: 1px;
}
p {
font-size: 18px;
line-height: 1.5;
margin-bottom: 20px;
}
.navbar-default {
background-color: white;
border-color: rgba(34, 34, 34, 0.05);
-webkit-transition: all 0.35s;
-moz-transition: all 0.35s;
transition: all 0.35s;
font-family: 'Catamaran', 'Helvetica', 'Arial', 'sans-serif';
font-weight: 200;
letter-spacing: 1px;
}
.navbar-default .navbar-header .navbar-brand {
font-family: 'Catamaran', 'Helvetica', 'Arial', 'sans-serif';
font-weight: 200;
letter-spacing: 1px;
color: #fdcc52;
}
.navbar-default .navbar-header .navbar-brand:hover,
.navbar-default .navbar-header .navbar-brand:focus {
color: #fcbd20;
}
.navbar-default .navbar-header .navbar-toggle {
font-size: 12px;
color: #222222;
padding: 8px 10px;
}
.navbar-default .nav > li > a {
font-family: 'Lato', 'Helvetica', 'Arial', 'sans-serif';
text-transform: uppercase;
letter-spacing: 2px;
font-size: 11px;
}
.navbar-default .nav > li > a,
.navbar-default .nav > li > a:focus {
color: #222222;
}
.navbar-default .nav > li > a:hover,
.navbar-default .nav > li > a:focus:hover {
color: #fdcc52;
}
.navbar-default .nav > li.active > a,
.navbar-default .nav > li.active > a:focus {
color: #fdcc52 !important;
background-color: transparent;
}
.navbar-default .nav > li.active > a:hover,
.navbar-default .nav > li.active > a:focus:hover {
background-color: transparent;
}
@media (min-width: 768px) {
.navbar-default {
background-color: transparent;
border-color: transparent;
}
.navbar-default .navbar-header .navbar-brand {
color: rgba(255, 255, 255, 0.7);
}
.navbar-default .navbar-header .navbar-brand:hover,
.navbar-default .navbar-header .navbar-brand:focus {
color: white;
}
.navbar-default .nav > li > a,
.navbar-default .nav > li > a:focus {
color: rgba(255, 255, 255, 0.7);
}
.navbar-default .nav > li > a:hover,
.navbar-default .nav > li > a:focus:hover {
color: white;
}
.navbar-default.affix {
background-color: white;
border-color: rgba(34, 34, 34, 0.1);
}
.navbar-default.affix .navbar-header .navbar-brand {
color: #222222;
}
.navbar-default.affix .navbar-header .navbar-brand:hover,
.navbar-default.affix .navbar-header .navbar-brand:focus {
color: #fdcc52;
}
.navbar-default.affix .nav > li > a,
.navbar-default.affix .nav > li > a:focus {
color: #222222;
}
.navbar-default.affix .nav > li > a:hover,
.navbar-default.affix .nav > li > a:focus:hover {
color: #fdcc52;
}
}
header {
position: relative;
width: 100%;
min-height: auto;
overflow-y: hidden;
background: url("../img/bg-pattern.png"), #7b4397;
/* fallback for old browsers */
background: url("../img/bg-pattern.png"), -webkit-linear-gradient(to left, #7b4397, #dc2430);
/* Chrome 10-25, Safari 5.1-6 */
background: url("../img/bg-pattern.png"), linear-gradient(to left, #7b4397, #dc2430);
/* W3C, IE 10+/ Edge, Firefox 16+, Chrome 26+, Opera 12+, Safari 7+ */
color: white;
}
header .header-content {
text-align: center;
padding: 150px 0 50px;
position: relative;
}
header .header-content .header-content-inner {
position: relative;
max-width: 500px;
margin: 0 auto;
}
header .header-content .header-content-inner h1 {
margin-top: 0;
margin-bottom: 30px;
font-size: 30px;
}
header .header-content .header-content-inner .list-badges {
margin-bottom: 25px;
}
header .header-content .header-content-inner .list-badges img {
height: 50px;
margin-bottom: 25px;
}
header .device-container {
max-width: 300px;
margin: 0 auto 100px;
}
header .device-container .screen img {
border-radius: 3px;
}
@media (min-width: 768px) {
header {
min-height: 100%;
}
header .header-content {
text-align: left;
padding: 0;
height: 100vh;
}
header .header-content .header-content-inner {
max-width: none;
margin: 0;
position: absolute;
top: 50%;
transform: translateY(-50%);
}
header .header-content .header-content-inner h1 {
font-size: 35px;
}
header .device-container {
max-width: none;
max-height: calc(0vh);
margin: 100px auto 0;
}
}
@media (min-width: 992px) {
header .header-content .header-content-inner h1 {
font-size: 50px;
}
}
section {
padding: 100px 0;
}
section h2 {
font-size: 50px;
}
section.download {
padding: 150px 0;
position: relative;
}
section.download h2 {
margin-top: 0;
font-size: 50px;
}
section.download .badges .badge-link {
display: block;
margin-bottom: 25px;
}
section.download .badges .badge-link:last-child {
margin-bottom: 0;
}
section.download .badges .badge-link img {
height: 60px;
}
@media (min-width: 768px) {
section.download .badges .badge-link {
display: inline-block;
margin-bottom: 0;
}
}
@media (min-width: 768px) {
section.download h2 {
font-size: 70px;
}
}
section.projects .section-heading {
margin-bottom: 100px;
}
section.projects .section-heading h2 {
margin-top: 0;
}
section.projects .section-heading p {
margin-bottom: 0;
}
section.projects .device-container,
section.projects .feature-item {
max-width: 300px;
margin: 0 auto;
}
section.projects .device-container {
margin-bottom: 100px;
}
@media (min-width: 992px) {
section.projects .device-container {
margin-bottom: 0;
}
}
section.projects .feature-item {
text-align: center;
margin-bottom: 100px;
}
section.projects .feature-item h3 {
font-size: 30px;
}
section.projects .feature-item i {
font-size: 80px;
background: -webkit-linear-gradient(to left, #7b4397, #dc2430);
background: linear-gradient(to left, #7b4397, #dc2430);
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
}
@media (min-width: 992px) {
section.projects .device-container,
section.projects .feature-item {
max-width: none;
}
}
section.cta {
position: relative;
-webkit-background-size: cover;
-moz-background-size: cover;
background-size: cover;
-o-background-size: cover;
background-position: center;
background-image: url('../img/bg-cta.jpg');
padding: 250px 0;
}
section.cta .cta-content {
position: relative;
z-index: 1;
}
section.cta .cta-content h2 {
margin-top: 0;
margin-bottom: 25px;
color: white;
max-width: 450px;
font-size: 50px;
}
@media (min-width: 768px) {
section.cta .cta-content h2 {
font-size: 80px;
}
}
section.cta .overlay {
height: 100%;
width: 100%;
background-color: rgba(0, 0, 0, 0.5);
position: absolute;
top: 0;
left: 0;
}
section.contact {
text-align: center;
}
section.contact h2 {
margin-top: 0;
margin-bottom: 25px;
}
section.contact h2 i {
color: #dd4b39;
}
section.contact ul.list-social {
margin-bottom: 0;
}
section.contact ul.list-social li a {
display: block;
height: 80px;
width: 80px;
line-height: 80px;
font-size: 40px;
border-radius: 100%;
color: white;
}
section.contact ul.list-social li.social-twitter a {
background-color: #1da1f2;
}
section.contact ul.list-social li.social-twitter a:hover {
background-color: #0d95e8;
}
section.contact ul.list-social li.social-facebook a {
background-color: #3b5998;
}
section.contact ul.list-social li.social-facebook a:hover {
background-color: #344e86;
}
section.contact ul.list-social li.social-google-plus a {
background-color: #dd4b39;
}
section.contact ul.list-social li.social-google-plus a:hover {
background-color: #d73925;
}
footer {
background-color: #222222;
padding: 25px 0;
color: rgba(255, 255, 255, 0.3);
text-align: center;
}
footer p {
font-size: 12px;
margin: 0;
}
footer ul {
margin-bottom: 0;
}
footer ul li a {
font-size: 12px;
color: rgba(255, 255, 255, 0.3);
}
footer ul li a:hover,
footer ul li a:focus,
footer ul li a:active,
footer ul li a.active {
text-decoration: none;
}
.bg-primary {
background: #fdcc52;
background: -webkit-linear-gradient(#fdcc52, #fdc539);
background: linear-gradient(#fdcc52, #fdc539);
}
.text-primary {
color: #fdcc52;
}
.no-gutter > [class*='col-'] {
padding-right: 0;
padding-left: 0;
}
.btn-outline {
color: white;
border-color: white;
border: 1px solid;
}
.btn-outline:hover,
.btn-outline:focus,
.btn-outline:active,
.btn-outline.active {
color: white;
background-color: #fdcc52;
border-color: #fdcc52;
}
.btn {
font-family: 'Lato', 'Helvetica', 'Arial', 'sans-serif';
text-transform: uppercase;
letter-spacing: 2px;
border-radius: 300px;
}
.btn-xl {
padding: 15px 45px;
font-size: 11px;
}
::-moz-selection {
color: white;
text-shadow: none;
background: #222222;
}
::selection {
color: white;
text-shadow: none;
background: #222222;
}
img::selection {
color: white;
background: transparent;
}
img::-moz-selection {
color: white;
background: transparent;
}
body {
  -webkit-tap-highlight-color: #222222;
}
|
justincadburywong/justincadburywong.github.io
|
css/new-age.css
|
CSS
|
mit
| 9,892 |
using System;
namespace Microsoft.eShopOnContainers.Services.Catalog.API.Model
{
public class CatalogItem
{
public int Id { get; set; }
public string Name { get; set; }
public string Description { get; set; }
public decimal Price { get; set; }
public string PictureUri { get; set; }
public int CatalogTypeId { get; set; }
public CatalogType CatalogType { get; set; }
public int CatalogBrandId { get; set; }
public CatalogBrand CatalogBrand { get; set; }
public CatalogItem() { }
}
}
|
oferns/eShopOnContainers
|
src/Services/Catalog/Catalog.API/Model/CatalogItem.cs
|
C#
|
mit
| 593 |
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>Class: Gem::Package::TarReader::UnexpectedEOF</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<meta http-equiv="Content-Script-Type" content="text/javascript" />
<link rel="stylesheet" href="../../../.././rdoc-style.css" type="text/css" media="screen" />
<script type="text/javascript">
// <![CDATA[
function popupCode( url ) {
window.open(url, "Code", "resizable=yes,scrollbars=yes,toolbar=no,status=no,height=150,width=400")
}
function toggleCode( id ) {
if ( document.getElementById )
elem = document.getElementById( id );
else if ( document.all )
elem = eval( "document.all." + id );
else
return false;
elemStyle = elem.style;
if ( elemStyle.display != "block" ) {
elemStyle.display = "block"
} else {
elemStyle.display = "none"
}
return true;
}
// Make codeblocks hidden by default
document.writeln( "<style type=\"text/css\">div.method-source-code { display: none }</style>" )
// ]]>
</script>
</head>
<body>
<div id="classHeader">
<table class="header-table">
<tr class="top-aligned-row">
<td><strong>Class</strong></td>
<td class="class-name-in-header">Gem::Package::TarReader::UnexpectedEOF</td>
</tr>
<tr class="top-aligned-row">
<td><strong>In:</strong></td>
<td>
<a href="../../../../files/lib/rubygems/package/tar_reader_rb.html">
lib/rubygems/package/tar_reader.rb
</a>
<br />
</td>
</tr>
<tr class="top-aligned-row">
<td><strong>Parent:</strong></td>
<td>
StandardError
</td>
</tr>
</table>
</div>
<!-- banner header -->
<div id="bodyContent">
<div id="contextContent">
<div id="description">
<p>
Raised if the tar IO is not seekable
</p>
</div>
</div>
</div>
<!-- if includes -->
<div id="section">
<!-- if method_list -->
</div>
<div id="validator-badges">
<p><small><a href="http://validator.w3.org/check/referer">[Validate]</a></small></p>
</div>
</body>
</html>
|
michaelsync/Giles
|
tools/Rake/lib/ruby/gems/1.8/doc/rubygems-1.5.2/rdoc/classes/Gem/Package/TarReader/UnexpectedEOF.html
|
HTML
|
mit
| 2,615 |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>JSDoc: Source: flags.js</title>
<script src="scripts/prettify/prettify.js"> </script>
<script src="scripts/prettify/lang-css.js"> </script>
<!--[if lt IE 9]>
<script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<link type="text/css" rel="stylesheet" href="styles/prettify-tomorrow.css">
<link type="text/css" rel="stylesheet" href="styles/jsdoc-default.css">
</head>
<body>
<div id="main">
<h1 class="page-title">Source: flags.js</h1>
<section>
<article>
<pre class="prettyprint source linenums"><code>/**
* just.js extension package
*
* @author danikas2k2 ([email protected])
*/
"use strict";
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
require("./global");
module.exports = (function () {
var Flags = (function () {
/**
*
* @param flag {number|Flags}
* @return {Flags}
* @constructor
*/
function Flags(flag) {
_classCallCheck(this, Flags);
this.reset(flag);
}
_createClass(Flags, [{
key: "valueOf",
/**
*
* @return {number}
*/
value: function valueOf() {
return this.flags;
}
}, {
key: "toString",
/**
*
* @return {string}
*/
value: function toString() {
return this.flags.toString();
}
}, {
key: "is",
/**
*
* @param flag {number|Flags}
* @return {boolean}
*/
value: function is(flag) {
return (this.flags & flag) == flag;
}
}, {
key: "set",
/**
*
* @param flag {number|Flags}
* @return {Flags}
*/
value: function set(flag) {
this.flags = this.flags | flag;
return this;
}
}, {
key: "unset",
/**
*
* @param flag {number|Flags}
* @return {Flags}
*/
value: function unset(flag) {
this.flags = this.flags & ~flag;
return this;
}
}, {
key: "reset",
/**
*
* @param flag {number|Flags}
* @return {Flags}
*/
value: function reset(flag) {
this.flags = isNaN(flag) ? 0 : +flag;
return this;
}
}]);
return Flags;
})();
return globalScope().Flags = Flags;
})();
</code></pre>
</article>
</section>
</div>
<nav>
<h2><a href="index.html">Home</a></h2><h3>Global</h3><ul><li><a href="global.html#value">value</a></li></ul>
</nav>
<br class="clear">
<footer>
Documentation generated by <a href="https://github.com/jsdoc3/jsdoc">JSDoc 3.3.2</a> on Thu Jul 09 2015 00:27:28 GMT+0300 (EEST)
</footer>
<script> prettyPrint(); </script>
<script src="scripts/linenumber.js"> </script>
</body>
</html>
|
danikas2k2/just
|
doc/flags.js.html
|
HTML
|
mit
| 3,993 |
require 'cgi'
require 'nkf'
class Mechanize::Util
# default mime type data for Page::Image#mime_type.
# You can use another Apache-compatible mimetab.
# mimetab = WEBrick::HTTPUtils.load_mime_types('/etc/mime.types')
# Mechanize::Util::DefaultMimeTypes.replace(mimetab)
DefaultMimeTypes = WEBrick::HTTPUtils::DefaultMimeTypes
class << self
# Builds a query string from a given enumerable object
# +parameters+. This method uses Mechanize::Util.each_parameter
# as preprocessor, which see.
def build_query_string(parameters, enc = nil)
each_parameter(parameters).inject(nil) { |s, (k, v)|
# WEBrick::HTTP.escape* has some problems about m17n on ruby-1.9.*.
(s.nil? ? '' : s << '&') << [CGI.escape(k.to_s), CGI.escape(v.to_s)].join('=')
} || ''
end
# Parses an enumerable object +parameters+ and iterates over the
# key-value pairs it contains.
#
# +parameters+ may be a hash, or any enumerable object which
# iterates over [key, value] pairs, typically an array of arrays.
#
# If a key is paired with an array-like object, the pair is
# expanded into multiple occurrences of the key, one for each
# element of the array. e.g. { a: [1, 2] } => [:a, 1], [:a, 2]
#
# If a key is paired with a hash-like object, the pair is expanded
# into hash-like multiple pairs, one for each pair of the hash.
# e.g. { a: { x: 1, y: 2 } } => ['a[x]', 1], ['a[y]', 2]
#
# An array-like value is allowed to be specified as hash value.
# e.g. { a: { q: [1, 2] } } => ['a[q]', 1], ['a[q]', 2]
#
# For a non-array-like, non-hash-like value, the key-value pair is
# yielded as is.
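    #
    # Illustrative sketch (hypothetical values) of the query string built on
    # top of this expansion:
    #
    #   Mechanize::Util.build_query_string(a: [1, 2], b: { x: 3 })
    #   #=> "a=1&a=2&b%5Bx%5D=3"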
def each_parameter(parameters, &block)
return to_enum(__method__, parameters) if block.nil?
parameters.each { |key, value|
each_parameter_1(key, value, &block)
}
end
private
def each_parameter_1(key, value, &block)
return if key.nil?
case
when s = String.try_convert(value)
yield [key, s]
when a = Array.try_convert(value)
a.each { |avalue|
yield [key, avalue]
}
when h = Hash.try_convert(value)
h.each { |hkey, hvalue|
each_parameter_1('%s[%s]' % [key, hkey], hvalue, &block)
}
else
yield [key, value]
end
end
end
# Converts string +s+ from +code+ to UTF-8.
def self.from_native_charset(s, code, ignore_encoding_error = false, log = nil)
return s unless s && code
return s unless Mechanize.html_parser == Nokogiri::HTML
begin
s.encode(code)
rescue EncodingError => ex
log.debug("from_native_charset: #{ex.class}: form encoding: #{code.inspect} string: #{s}") if log
if ignore_encoding_error
s
else
raise
end
end
end
def self.html_unescape(s)
return s unless s
s.gsub(/&(\w+|#[0-9]+);/) { |match|
number = case match
when /&(\w+);/
Mechanize.html_parser::NamedCharacters[$1]
when /&#([0-9]+);/
$1.to_i
end
number ? ([number].pack('U') rescue match) : match
}
end
case NKF::BINARY
when Encoding
def self.guess_encoding(src)
# NKF.guess of JRuby may return nil
NKF.guess(src) || Encoding::US_ASCII
end
else
# Old NKF from 1.8, still bundled with Rubinius
NKF_ENCODING_MAP = {
NKF::UNKNOWN => Encoding::US_ASCII,
NKF::BINARY => Encoding::ASCII_8BIT,
NKF::ASCII => Encoding::US_ASCII,
NKF::JIS => Encoding::ISO_2022_JP,
NKF::EUC => Encoding::EUC_JP,
NKF::SJIS => Encoding::Shift_JIS,
NKF::UTF8 => Encoding::UTF_8,
NKF::UTF16 => Encoding::UTF_16BE,
NKF::UTF32 => Encoding::UTF_32BE,
}
def self.guess_encoding(src)
NKF_ENCODING_MAP[NKF.guess(src)]
end
end
def self.detect_charset(src)
if src
guess_encoding(src).name.upcase
else
Encoding::ISO8859_1.name
end
end
def self.uri_escape str, unsafe = nil
@parser ||= begin
URI::Parser.new
rescue NameError
URI
end
if URI == @parser then
unsafe ||= URI::UNSAFE
else
unsafe ||= @parser.regexp[:UNSAFE]
end
@parser.escape str, unsafe
end
def self.uri_unescape str
@parser ||= begin
URI::Parser.new
rescue NameError
URI
end
@parser.unescape str
end
end
|
eligoenergy/mechanize
|
lib/mechanize/util.rb
|
Ruby
|
mit
| 4,585 |
require 'english/class'
class English
# = Noun Number Inflections
#
# This module provides english singular <-> plural noun inflections.
module Inflect
@singular_of = {}
@plural_of = {}
@singular_rules = []
@plural_rules = []
# This class provides the DSL for creating inflections, you can add additional rules.
# Examples:
#
# word "ox", "oxen"
# word "octopus", "octopi"
# word "man", "men"
#
# rule "lf", "lves"
#
# word "equipment"
#
# Rules are evaluated by size, so rules you add to override specific cases should be longer than the rule
# it overrides. For instance, if you want "pta" to pluralize to "ptas", even though a general purpose rule
# for "ta" => "tum" already exists, simply add a new rule for "pta" => "ptas", and it will automatically win
# since it is longer than the old rule.
#
# Also, single-word exceptions win over general words ("ox" pluralizes to "oxen", because it's a single word
# exception, even though "fox" pluralizes to "foxes")
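    #
    # A quick usage sketch (illustrative; relies on the default rules and
    # exceptions defined further below):
    #
    #   English::Inflect.plural("ox")      #=> "oxen"
    #   English::Inflect.singular("boys")  #=> "boy"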
class << self
# Define a general two-way exception.
#
# This also defines a general rule, so foo_child will correctly become
# foo_children.
#
# Whole words also work if they are capitalized (Goose => Geese).
def word(singular, plural=nil)
plural = singular unless plural
singular_word(singular, plural)
plural_word(singular, plural)
rule(singular, plural)
end
# Define a singularization exception.
def singular_word(singular, plural)
@singular_of[plural] = singular
@singular_of[plural.capitalize] = singular.capitalize
end
# Define a pluralization exception.
def plural_word(singular, plural)
@plural_of[singular] = plural
@plural_of[singular.capitalize] = plural.capitalize
end
# Define a general rule.
def rule(singular, plural)
singular_rule(singular, plural)
plural_rule(singular, plural)
end
# Define a singularization rule.
def singular_rule(singular, plural)
@singular_rules << [singular, plural]
end
      # Define a pluralization rule.
def plural_rule(singular, plural)
@plural_rules << [singular, plural]
end
# Read prepared singularization rules.
def singularization_rules
if defined?(@singularization_regex) && @singularization_regex
return [@singularization_regex, @singularization_hash]
end
# No sorting needed: Regexen match on longest string
@singularization_regex = Regexp.new("(" + @singular_rules.map {|s,p| p}.join("|") + ")$", "i")
@singularization_hash = Hash[*@singular_rules.flatten].invert
[@singularization_regex, @singularization_hash]
end
# Read prepared singularization rules.
#def singularization_rules
# return @singularization_rules if @singularization_rules
# sorted = @singular_rules.sort_by{ |s, p| "#{p}".size }.reverse
# @singularization_rules = sorted.collect do |s, p|
# [ /#{p}$/, "#{s}" ]
# end
#end
# Read prepared pluralization rules.
def pluralization_rules
if defined?(@pluralization_regex) && @pluralization_regex
return [@pluralization_regex, @pluralization_hash]
end
@pluralization_regex = Regexp.new("(" + @plural_rules.map {|s,p| s}.join("|") + ")$", "i")
@pluralization_hash = Hash[*@plural_rules.flatten]
[@pluralization_regex, @pluralization_hash]
end
# Read prepared pluralization rules.
#def pluralization_rules
# return @pluralization_rules if @pluralization_rules
# sorted = @plural_rules.sort_by{ |s, p| "#{s}".size }.reverse
# @pluralization_rules = sorted.collect do |s, p|
# [ /#{s}$/, "#{p}" ]
# end
#end
#
def singular_of ; @singular_of ; end
#
def plural_of ; @plural_of ; end
      # Convert an English word from plural to singular.
#
# "boys".singular #=> boy
# "tomatoes".singular #=> tomato
#
def singular(word)
return "" if word == ""
if result = singular_of[word]
return result.dup
end
result = word.dup
regex, hash = singularization_rules
result.sub!(regex) {|m| hash[m]}
singular_of[word] = result
return result
#singularization_rules.each do |(match, replacement)|
# break if result.gsub!(match, replacement)
#end
#return result
end
# Alias for #singular (a Railism).
#
alias_method(:singularize, :singular)
      # Convert an English word from singular to plural.
#
# "boy".plural #=> boys
# "tomato".plural #=> tomatoes
#
def plural(word)
return "" if word == ""
if result = plural_of[word]
return result.dup
end
#return self.dup if /s$/ =~ self # ???
result = word.dup
regex, hash = pluralization_rules
result.sub!(regex) {|m| hash[m]}
plural_of[word] = result
return result
#pluralization_rules.each do |(match, replacement)|
# break if result.gsub!(match, replacement)
#end
#return result
end
# Alias for #plural (a Railism).
alias_method(:pluralize, :plural)
# Clear all rules.
def clear(type = :all)
if type == :singular || type == :all
@singular_of = {}
@singular_rules = []
@singularization_rules, @singularization_regex = nil, nil
end
if type == :plural || type == :all
@singular_of = {}
@singular_rules = []
@singularization_rules, @singularization_regex = nil, nil
end
end
end
# One argument means singular and plural are the same.
word 'equipment'
word 'information'
word 'money'
word 'species'
word 'series'
word 'fish'
word 'sheep'
word 'moose'
word 'hovercraft'
word 'news'
word 'rice'
word 'plurals'
# Two arguments defines a singular and plural exception.
word 'Swiss' , 'Swiss'
word 'alias' , 'aliases'
word 'analysis' , 'analyses'
#word 'axis' , 'axes'
word 'basis' , 'bases'
word 'buffalo' , 'buffaloes'
word 'child' , 'children'
#word 'cow' , 'kine'
word 'crisis' , 'crises'
word 'criterion' , 'criteria'
word 'datum' , 'data'
word 'goose' , 'geese'
word 'hive' , 'hives'
word 'index' , 'indices'
word 'life' , 'lives'
word 'louse' , 'lice'
word 'man' , 'men'
word 'matrix' , 'matrices'
word 'medium' , 'media'
word 'mouse' , 'mice'
word 'movie' , 'movies'
word 'octopus' , 'octopi'
word 'ox' , 'oxen'
word 'person' , 'people'
word 'potato' , 'potatoes'
word 'quiz' , 'quizzes'
word 'shoe' , 'shoes'
word 'status' , 'statuses'
word 'testis' , 'testes'
word 'thesis' , 'theses'
word 'thief' , 'thieves'
word 'tomato' , 'tomatoes'
word 'torpedo' , 'torpedoes'
word 'vertex' , 'vertices'
word 'virus' , 'viri'
word 'wife' , 'wives'
# One-way singularization exception (convert plural to singular).
singular_word 'cactus', 'cacti'
# One-way pluralizaton exception (convert singular to plural).
plural_word 'axis', 'axes'
# General rules.
rule 'rf' , 'rves'
rule 'ero' , 'eroes'
rule 'ch' , 'ches'
rule 'sh' , 'shes'
rule 'ss' , 'sses'
#rule 'ess' , 'esses'
rule 'ta' , 'tum'
rule 'ia' , 'ium'
rule 'ra' , 'rum'
rule 'ay' , 'ays'
rule 'ey' , 'eys'
rule 'oy' , 'oys'
rule 'uy' , 'uys'
rule 'y' , 'ies'
rule 'x' , 'xes'
rule 'lf' , 'lves'
rule 'ffe' , 'ffes'
rule 'af' , 'aves'
rule 'us' , 'uses'
rule 'ouse' , 'ouses'
rule 'osis' , 'oses'
rule 'ox' , 'oxes'
rule '' , 's'
# One-way singular rules.
singular_rule 'of' , 'ofs' # proof
singular_rule 'o' , 'oes' # hero, heroes
#singular_rule 'f' , 'ves'
# One-way plural rules.
plural_rule 's' , 'ses'
plural_rule 'ive' , 'ives' # don't want to snag wife
plural_rule 'fe' , 'ves' # don't want to snag perspectives
end
#
def self.singular(string)
English::Inflect.singular(string)
end
#
def self.plural(string)
English::Inflect.plural(string)
end
  # Convert an English word from plural to singular.
#
# "boys".singular #=> boy
# "tomatoes".singular #=> tomato
#
def singular
self.class.singular(@self)
end
# Alias for #singular.
alias_method(:singularize, :singular)
  # Convert an English word from singular to plural.
  #
  #   "boy".plural #=> boys
  #   "tomato".plural #=> tomatoes
#
def plural
self.class.plural(@self)
end
# Alias for #plural.
alias_method(:pluralize, :plural)
end
|
rubygengo/english
|
lib/english/inflect.rb
|
Ruby
|
mit
| 9,263 |
#ifndef MOAISPINEANIMATIONMIXTABLE_H
#define MOAISPINEANIMATIONMIXTABLE_H
#include <moai-core/headers.h>
//----------------------------------------------------------------//
class MOAISpineAnimationMixEntry
{
public:
STLString mSrc;
STLString mTarget;
// STLString mOverlay;
float mDuration;
float mDelay; //delay before playing target animation, should be < mDuration
};
//----------------------------------------------------------------//
class MOAISpineAnimationMixTable :
public virtual MOAILuaObject
{
private:
typedef STLMap < STLString, MOAISpineAnimationMixEntry* >::iterator MixMapIt;
STLMap < STLString, MOAISpineAnimationMixEntry* > mMixMap ;
static int _setMix ( lua_State* L );
static int _getMix ( lua_State* L );
MOAISpineAnimationMixEntry* AffirmMix( STLString src, STLString target );
public:
MOAISpineAnimationMixEntry* GetMix( STLString src, STLString target );
// void SetMix();
DECL_LUA_FACTORY( MOAISpineAnimationMixTable )
MOAISpineAnimationMixTable();
~MOAISpineAnimationMixTable();
void RegisterLuaClass ( MOAILuaState& state );
void RegisterLuaFuncs ( MOAILuaState& state );
};
#endif
|
tommo/gii
|
test/host/extension/MOAISpineAnimationMixTable.h
|
C
|
mit
| 1,158 |
var PixiText = require('../../lib/pixi/src/core/text/Text'),
utils = require('../core/utils'),
math = require('../../lib/pixi/src/core/math'),
Sprite = require('../display/Sprite'),
CONST = require('../core/const');
function Text(text, style, resolution){
this._init(text, style, resolution);
}
Text.prototype = Object.create(PixiText.prototype);
Text.prototype.constructor = Text;
Text.fontPropertiesCache = {};
Text.fontPropertiesCanvas = document.createElement('canvas');
Text.fontPropertiesContext = Text.fontPropertiesCanvas.getContext('2d');
Text.prototype._init = function(text, style, resolution){
text = text || ' ';
PixiText.call(this, text, style, resolution);
this.speed = new math.Point();
this.anchor = new math.Point(0.5, 0.5);
this.pivot = new math.Point(0.5, 0.5);
};
Text.prototype.displayObjectUpdateTransform = function(){
// create some matrix refs for easy access
var pt = this.parent.worldTransform;
var wt = this.worldTransform;
//anchor, pivot, and flip variables
var sx = (this.flipX) ? -this.scale.x : this.scale.x,
sy = (this.flipY) ? -this.scale.y : this.scale.y,
ax = (this.flipX) ? 1-this.anchor.x : this.anchor.x,
ay = (this.flipY) ? 1-this.anchor.y : this.anchor.y,
px = (this.flipX) ? 1-this.pivot.x : this.pivot.x,
py = (this.flipY) ? 1-this.pivot.y : this.pivot.y;
// temporary matrix variables
var a, b, c, d, tx, ty;
    // Avoid using _width or _height when they are 0
if(!this._width||!this._height){
this._width = this.width/this.scale.x;
this._height = this.height/this.scale.y;
}
var anchorWidth = ax * this._width * sx,
anchorHeight = ay * this._height * sy,
pivotWidth = px * this._width * sx,
pivotHeight = py * this._height * sy;
// so if rotation is between 0 then we can simplify the multiplication process...
if (this.rotation % CONST.PI_2)
{
// check to see if the rotation is the same as the previous render. This means we only need to use sin and cos when rotation actually changes
if (this.rotation !== this.rotationCache)
{
this.rotationCache = this.rotation;
this._sr = Math.sin(this.rotation);
this._cr = Math.cos(this.rotation);
}
// get the matrix values of the displayobject based on its transform properties..
a = this._cr * sx;
b = this._sr * sx;
c = -this._sr * sy;
d = this._cr * sy;
tx = this.position.x + pivotWidth - anchorWidth;
ty = this.position.y + pivotHeight - anchorHeight;
if (pivotWidth || pivotHeight)
{
tx -= pivotWidth * this._cr + pivotHeight * -this._sr;
ty -= pivotWidth * this._sr + pivotHeight * this._cr;
}
// concat the parent matrix with the objects transform.
wt.a = a * pt.a + b * pt.c;
wt.b = a * pt.b + b * pt.d;
wt.c = c * pt.a + d * pt.c;
wt.d = c * pt.b + d * pt.d;
wt.tx = tx * pt.a + ty * pt.c + pt.tx;
wt.ty = tx * pt.b + ty * pt.d + pt.ty;
}
else
{
// lets do the fast version as we know there is no rotation..
a = sx;
d = sy;
tx = this.position.x - anchorWidth;
ty = this.position.y - anchorHeight;
wt.a = a * pt.a;
wt.b = a * pt.b;
wt.c = d * pt.c;
wt.d = d * pt.d;
wt.tx = tx * pt.a + ty * pt.c + pt.tx;
wt.ty = tx * pt.b + ty * pt.d + pt.ty;
}
// multiply the alphas..
this.worldAlpha = this.alpha * this.parent.worldAlpha;
// reset the bounds each time this is called!
this._currentBounds = null;
};
Text.prototype._renderCanvas = function (renderer)
{
if (this.dirty)
{
// this.resolution = 1//renderer.resolution;
this.updateText();
}
//Sprite.prototype._renderCanvas.call(this, renderer);
this._customRenderCanvas(renderer);
};
Text.prototype._customRenderCanvas = function(renderer){
if (this.texture.crop.width <= 0 || this.texture.crop.height <= 0)
{
return;
}
if (this.blendMode !== renderer.currentBlendMode)
{
renderer.currentBlendMode = this.blendMode;
renderer.context.globalCompositeOperation = renderer.blendModes[renderer.currentBlendMode];
}
// Ignore null sources
if (this.texture.valid)
{
var texture = this._texture,
wt = this.worldTransform,
dx,
dy,
width,
height;
var resolution = texture.baseTexture.resolution / renderer.resolution;
renderer.context.globalAlpha = this.worldAlpha;
// If smoothingEnabled is supported and we need to change the smoothing property for this texture
if (renderer.smoothProperty && renderer.currentScaleMode !== texture.baseTexture.scaleMode)
{
renderer.currentScaleMode = texture.baseTexture.scaleMode;
renderer.context[renderer.smoothProperty] = (renderer.currentScaleMode === CONST.SCALE_MODES.LINEAR);
}
// If the texture is trimmed we offset by the trim x/y, otherwise we use the frame dimensions
if(texture.rotate)
{
// cheeky rotation!
var a = wt.a;
var b = wt.b;
wt.a = -wt.c;
wt.b = -wt.d;
wt.c = a;
wt.d = b;
            width = texture.crop.height; // width/height are deliberately swapped here: the texture is stored rotated in the atlas
height = texture.crop.width;
dx = (texture.trim) ? texture.trim.y - this.anchor.y * texture.trim.height : this.anchor.y * -texture._frame.height;
dy = (texture.trim) ? texture.trim.x - this.anchor.x * texture.trim.width : this.anchor.x * -texture._frame.width;
}
else
{
width = texture.crop.width;
height = texture.crop.height;
dx = (texture.trim) ? texture.trim.x - this.anchor.x * texture.trim.width : this.anchor.x * -texture._frame.width;
dy = (texture.trim) ? texture.trim.y - this.anchor.y * texture.trim.height : this.anchor.y * -texture._frame.height;
}
// Allow for pixel rounding
if (renderer.roundPixels)
{
renderer.context.setTransform(
wt.a,
wt.b,
wt.c,
wt.d,
(wt.tx * renderer.resolution) | 0,
(wt.ty * renderer.resolution) | 0
);
dx = dx | 0;
dy = dy | 0;
}
else
{
renderer.context.setTransform(
wt.a,
wt.b,
wt.c,
wt.d,
wt.tx * renderer.resolution,
wt.ty * renderer.resolution
);
}
var anchorWidth = this.anchor.x * this._width/resolution,
anchorHeight = this.anchor.y * this._height/resolution;
if (this.tint !== 0xFFFFFF)
{
if (this.cachedTint !== this.tint)
{
this.cachedTint = this.tint;
// TODO clean up caching - how to clean up the caches?
// TODO: dont works with spritesheets
this.tintedTexture = CanvasTinter.getTintedTexture(this, this.tint);
}
renderer.context.drawImage(
this.tintedTexture,
0,
0,
width * resolution * renderer.resolution,
height * resolution * renderer.resolution,
dx / resolution,
dy / resolution,
width * renderer.resolution,
height * renderer.resolution
);
}
else
{
            //TODO: when the renderer resolution is greater than 1, sprites are displayed incorrectly
renderer.context.drawImage(
texture.baseTexture.source,
texture.crop.x * resolution,
texture.crop.y * resolution,
width * resolution * renderer.resolution,
height * resolution * renderer.resolution,
dx / resolution + anchorWidth,
dy / resolution + anchorHeight,
width * renderer.resolution,
height * renderer.resolution
);
}
}
};
Text.prototype.renderWebGL = function (renderer)
{
if (this.dirty)
{
//this.resolution = 1//renderer.resolution;
this.updateText();
}
Sprite.prototype.renderWebGL.call(this, renderer);
};
Text.prototype.updateText = function (){
var style = this._style;
this.context.font = style.font;
// word wrap
// preserve original text
var outputText = style.wordWrap ? this.wordWrap(this._text) : this._text;
// split text into lines
var lines = outputText.split(/(?:\r\n|\r|\n)/);
// calculate text width
var lineWidths = new Array(lines.length);
var maxLineWidth = 0;
var fontProperties = this.determineFontProperties(style.font);
for (var i = 0; i < lines.length; i++)
{
var lineWidth = this.context.measureText(lines[i]).width;
lineWidths[i] = lineWidth;
maxLineWidth = Math.max(maxLineWidth, lineWidth);
}
var width = maxLineWidth + style.strokeThickness;
if (style.dropShadow)
{
width += style.dropShadowDistance;
}
this.canvas.width = ( width + this.context.lineWidth ) * this.resolution;
// calculate text height
var lineHeight = this.style.lineHeight || fontProperties.fontSize + style.strokeThickness;
var height = lineHeight * lines.length;
if (style.dropShadow)
{
height += style.dropShadowDistance;
}
this.canvas.height = ( height + this._style.padding * 2 ) * this.resolution;
this.context.scale( this.resolution, this.resolution);
if (navigator.isCocoonJS)
{
this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
}
//this.context.fillStyle="#FF0000";
//this.context.fillRect(0, 0, this.canvas.width, this.canvas.height);
this.context.font = style.font;
this.context.strokeStyle = (typeof style.stroke === "number") ? utils.hex2string(style.stroke) : style.stroke;
this.context.lineWidth = style.strokeThickness;
this.context.textBaseline = style.textBaseline;
this.context.lineJoin = style.lineJoin;
this.context.miterLimit = style.miterLimit;
var linePositionX;
var linePositionY;
if (style.dropShadow)
{
this.context.fillStyle = style.dropShadowColor;
var xShadowOffset = Math.cos(style.dropShadowAngle) * style.dropShadowDistance;
var yShadowOffset = Math.sin(style.dropShadowAngle) * style.dropShadowDistance;
for (i = 0; i < lines.length; i++)
{
linePositionX = style.strokeThickness / 2;
linePositionY = (style.strokeThickness / 2 + i * lineHeight) + fontProperties.ascent;
if (style.align === 'right')
{
linePositionX += maxLineWidth - lineWidths[i];
}
else if (style.align === 'center')
{
linePositionX += (maxLineWidth - lineWidths[i]) / 2;
}
if (style.fill)
{
this.context.fillText(lines[i], linePositionX + xShadowOffset, linePositionY + yShadowOffset + this._style.padding);
}
}
}
//set canvas text styles
this.context.fillStyle = (typeof style.fill === "number") ? utils.hex2string(style.fill) : style.fill;
//draw lines line by line
for (i = 0; i < lines.length; i++)
{
linePositionX = style.strokeThickness / 2;
linePositionY = (style.strokeThickness / 2 + i * lineHeight) + fontProperties.ascent;
if (style.align === 'right')
{
linePositionX += maxLineWidth - lineWidths[i];
}
else if (style.align === 'center')
{
linePositionX += (maxLineWidth - lineWidths[i]) / 2;
}
if (style.stroke && style.strokeThickness)
{
this.context.strokeText(lines[i], linePositionX, linePositionY + this._style.padding);
}
if (style.fill)
{
this.context.fillText(lines[i], linePositionX, linePositionY + this._style.padding);
}
}
this.updateTexture();
};
Text.prototype.setStyle = function(style){
this.style = style;
return this;
};
Text.prototype.setText = function(text, keys){
if(keys)text = utils.parseTextKeys(text, keys);
this.text = text;
return this;
};
Text.prototype.setWordWrap = function(value){
if(value === false){
this.style.wordWrap = value;
}else{
this.style.wordWrap = true;
this.style.wordWrapWidth = value;
}
this.dirty = true;
return this;
};
Text.prototype.containsPoint = Sprite.prototype.containsPoint;
Text.prototype.getLocalBounds = Sprite.prototype.getLocalBounds;
module.exports = Text;
|
TarentolaDigital/perenquen
|
src/display/Text.js
|
JavaScript
|
mit
| 13,176 |
/**
* A wrapper around JSLint to drop things into the console
*
* Copyright (C) 2011 Nikolay Nemshilov
*/
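//
// Usage sketch (illustrative; the file paths are hypothetical):
//
//   var Linter = require('./linter').Linter;
//   new Linter('src/app.js', 'lint_options.js').run().report();
//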
var RightJS = require('./right-server.js');
var JSLint = require('./jslint').JSLINT;
var fs = require('fs');
exports.Linter = new RightJS.Class({
extend: {
Options: {
debug: false, // no debug
devel: false, // no console.log s
evil: false, // no evals
passfail: false, // don't stop on errors
onevar: false, // allow more than one 'var' definition
forin: true , // allow for in without ownershipt checks
indent: 2 , // enforce 2 spaces indent
maxerr: 12 , // max number of errors
},
Okays: [
"Move 'var' declarations to the top of the function.",
"Do not use 'new' for side effects.",
"The Function constructor is eval."
]
},
/**
* Basic constructor
*
* @param {String} the source
* @param {String} the linter options
* @return void
*/
initialize: function(src, options) {
this.source = src;
this.options = options;
},
/**
* Runs the linter
*
* @return {Linter} this
*/
run: function() {
var options = {}, okays = [], patches = '';
// extracting the additional options
try { // skipping non-existing patch files
patches = fs.readFileSync(this.options).toString();
} catch(e) {}
eval(patches);
JSLint.okays = this.constructor.Okays.concat(okays);
JSLint(
fs.readFileSync(this.source).toString(),
Object.merge(this.constructor.Options, options)
);
this.errors = JSLint.errors.compact();
this.failed = this.errors.length > 0;
return this;
},
/**
* Prints out the check report
*
* @return {Linter} this
*/
report: function() {
if (this.errors.empty()) {
console.log("\u001B[32m - JSLint check successfully passed\u001B[0m");
} else {
console.log("\u001B[31m - JSLint check failed in: "+ this.source + "\u001B[0m");
this.errors.each(function(error) {
var report = "\n", j=0, pointer='';
for (; j < error.character-1; j++) { pointer += '-'; }
report += " \u001B[35m"+ error.reason +"\u001B[0m ";
if (error.evidence) {
report += "Line: "+ error.line + ", Char: "+ error.character + "\n";
report += " "+ error.evidence + "\n";
report += " \u001B[33m"+ pointer + "^\u001B[0m";
}
console.log(report);
});
console.log("\n")
}
return this;
}
});
|
rightjs/rightjs-ui
|
util/linter.js
|
JavaScript
|
mit
| 2,541 |
# coding=utf-8
import pygame
import pygame.locals
class Board(object):
"""
    The game board. Responsible for drawing the game window.
"""
def __init__(self, width, height):
"""
        Board constructor. Prepares the game window.
        :param width: width in pixels
        :param height: height in pixels
"""
self.surface = pygame.display.set_mode((width, height), 0, 32)
pygame.display.set_caption('Game of life')
def draw(self, *args):
"""
        Draws the game window
        :param args: list of objects to draw
"""
background = (0, 0, 0)
self.surface.fill(background)
for drawable in args:
drawable.draw_on(self.surface)
        # only at this point does the actual drawing in the game window happen;
        # before this we were merely specifying what should be drawn and how
pygame.display.update()
class GameOfLife(object):
"""
    Ties all the elements of the game together.
"""
def __init__(self, width, height, cell_size=10):
"""
        Prepares the game settings
        :param width: board width measured in number of cells
        :param height: board height measured in number of cells
        :param cell_size: cell edge length in pixels
"""
pygame.init()
self.board = Board(width * cell_size, height * cell_size)
        # clock that we will use to control how fast consecutive
        # frames of the game are drawn
self.fps_clock = pygame.time.Clock()
def run(self):
"""
        Main game loop
"""
while not self.handle_events():
            # keep looping until we receive the signal to quit
self.board.draw()
self.fps_clock.tick(15)
def handle_events(self):
"""
        Handles system events; this is where we interpret e.g. mouse input
        :return True if pygame reported a quit event
"""
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
return True
# magic numbers used to determine whether a cell is alive
DEAD = 0
ALIVE = 1
class Population(object):
"""
    A population of cells
"""
def __init__(self, width, height, cell_size=10):
"""
        Prepares the population settings
        :param width: board width measured in number of cells
        :param height: board height measured in number of cells
        :param cell_size: cell edge length in pixels
"""
self.box_size = cell_size
self.height = height
self.width = width
self.generation = self.reset_generation()
def reset_generation(self):
"""
        Creates and returns the matrix of an empty population
"""
        # in a loop, fill the list with columns
        # which in turn are filled (also in a loop) with the value 0 (DEAD)
return [[DEAD for y in xrange(self.height)] for x in xrange(self.width)]
def handle_mouse(self):
        # get the state of the mouse buttons using a pygame function
buttons = pygame.mouse.get_pressed()
if not any(buttons):
            # ignore the event if none of the buttons is pressed
return
        # add a live cell if the first mouse button is pressed;
        # this way we can not only add live cells but also remove them
alive = True if buttons[0] else False
        # get the cursor position on the board, measured in pixels
x, y = pygame.mouse.get_pos()
        # convert the pixel coordinates to the cell's coordinates in the matrix;
        # the player can click anywhere inside a box_size-wide square to pick a cell
x /= self.box_size
y /= self.box_size
        # set the cell's state in the matrix
self.generation[x][y] = ALIVE if alive else DEAD
def draw_on(self, surface):
"""
        Draws the cells on the board
"""
for x, y in self.alive_cells():
size = (self.box_size, self.box_size)
position = (x * self.box_size, y * self.box_size)
color = (255, 255, 255)
thickness = 1
pygame.draw.rect(surface, color, pygame.locals.Rect(position, size), thickness)
def alive_cells(self):
"""
        A generator yielding the coordinates of live cells.
"""
for x in range(len(self.generation)):
column = self.generation[x]
for y in range(len(column)):
if column[y] == ALIVE:
                    # if the cell is alive, yield its coordinates
yield x, y
# This part should always be at the very end of the module (this file is a module);
# we want to start the game only after all the classes have been declared
if __name__ == "__main__":
game = GameOfLife(80, 40)
game.run()
|
roninek/python101
|
docs/pygame/life/code1a.py
|
Python
|
mit
| 5,039 |
using System.Linq.Expressions;
using System.Collections.Generic;
namespace Bermuda.ExpressionGeneration
{
public partial class ValueExpression : ExpressionTreeBase
{
public long Value { get; private set; }
public ValueExpression(long value)
{
Value = value;
}
public override string ToString()
{
return string.Format("@{0}", Value.ToString());
}
public override IEnumerable<ExpressionTreeBase> GetChildren()
{
yield break;
}
public override Expression CreateExpression(object context)
{
return Expression.Constant(Value);
}
}
}
|
melnx/Bermuda
|
Bermuda.ExpressionGenerator/QL/ValueExpression.cs
|
C#
|
mit
| 730 |
// String literal types are only valid in overload signatures
function foo(x: any);
function foo(x: 'hi') { }
class C {
foo(x: string);
foo(x: 'hi') { }
}
interface I {
(x: 'a');
(x: 'hi');
foo(x: 'a', y: 'a');
foo(x: 'hi', y: 'hi');
}
var a: {
(x: 'hi');
(x: 'a');
foo(x: 'hi');
foo(x: 'a');
}
var b = {
foo(x: 'hi') { },
foo(x: 'a') { },
}
|
Pajn/prettier
|
tests/typescript/conformance/types/objectTypeLiteral/callSignatures/stringLiteralTypesInImplementationSignatures2.ts
|
TypeScript
|
mit
| 395 |
<?php
namespace PhraseanetSDK\Tests\Cache;
use PhraseanetSDK\Cache\BackendCacheFactory;
use PhraseanetSDK\Exception\RuntimeException;
class BackendCacheFactoryTest extends \PHPUnit_Framework_TestCase
{
/**
* @dataProvider provideValidParameters
*/
public function testCreateSuccess($type, $host, $port, $instanceOf, $classExists)
{
if (null !== $classExists) {
if (!class_exists($classExists)) {
$this->markTestSkipped(sprintf('Unable to find class %s', $classExists));
}
}
$factory = new BackendCacheFactory();
try {
$this->assertInstanceOf($instanceOf, $factory->create($type, $host, $port));
} catch (RuntimeException $e) {
$this->assertContains(ucfirst(strtolower($type)), $e->getMessage());
}
}
public function provideValidParameters()
{
return array(
array('memcache', '127.0.0.1', 11211, 'Doctrine\Common\Cache\MemcacheCache', 'Memcache'),
array('memcache', null, null, 'Doctrine\Common\Cache\MemcacheCache', 'Memcache'),
array('memcached', '127.0.0.1', 11211, 'Doctrine\Common\Cache\MemcachedCache', 'Memcached'),
array('memcached', null, null, 'Doctrine\Common\Cache\MemcachedCache', 'Memcached'),
array('array', '127.0.0.1', 11211, 'Doctrine\Common\Cache\ArrayCache', null),
array('array', null, null, 'Doctrine\Common\Cache\ArrayCache', null),
);
}
/**
* @dataProvider provideInvalidParameters
* @expectedException PhraseanetSDK\Exception\RuntimeException
*/
public function testCreateFailure($type, $host, $port, $classExists)
{
if (null !== $classExists) {
if (!class_exists($classExists)) {
$this->markTestSkipped(sprintf('Unable to find class %s', $classExists));
}
}
$factory = new BackendCacheFactory();
$factory->create($type, $host, $port);
}
public function provideInvalidParameters()
{
return array(
array('memcache', 'nohost', 'noport', 'Memcache'),
array('memcached', 'nohost', 'noport', 'Memcache'),
array('unknown', 'nohost', 'noport', null),
array('unknown', null, null, null),
);
}
}
|
aztech-dev/Phraseanet-PHP-SDK
|
tests/PhraseanetSDK/Tests/Cache/BackendCacheFactoryTest.php
|
PHP
|
mit
| 2,336 |
"""
File-based Checkpoints implementations.
"""
import os
import shutil
from tornado.web import HTTPError
from .checkpoints import (
Checkpoints,
GenericCheckpointsMixin,
)
from .fileio import FileManagerMixin
from IPython.utils import tz
from IPython.utils.path import ensure_dir_exists
from IPython.utils.py3compat import getcwd
from IPython.utils.traitlets import Unicode
class FileCheckpoints(FileManagerMixin, Checkpoints):
"""
A Checkpoints that caches checkpoints for files in adjacent
directories.
Only works with FileContentsManager. Use GenericFileCheckpoints if
you want file-based checkpoints with another ContentsManager.
"""
checkpoint_dir = Unicode(
'.ipynb_checkpoints',
config=True,
help="""The directory name in which to keep file checkpoints
This is a path relative to the file's own directory.
By default, it is .ipynb_checkpoints
""",
)
root_dir = Unicode(config=True)
def _root_dir_default(self):
try:
return self.parent.root_dir
except AttributeError:
return getcwd()
# ContentsManager-dependent checkpoint API
def create_checkpoint(self, contents_mgr, path):
"""Create a checkpoint."""
checkpoint_id = u'checkpoint'
src_path = contents_mgr._get_os_path(path)
dest_path = self.checkpoint_path(checkpoint_id, path)
self._copy(src_path, dest_path)
return self.checkpoint_model(checkpoint_id, dest_path)
def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
"""Restore a checkpoint."""
src_path = self.checkpoint_path(checkpoint_id, path)
dest_path = contents_mgr._get_os_path(path)
self._copy(src_path, dest_path)
# ContentsManager-independent checkpoint API
def rename_checkpoint(self, checkpoint_id, old_path, new_path):
"""Rename a checkpoint from old_path to new_path."""
old_cp_path = self.checkpoint_path(checkpoint_id, old_path)
new_cp_path = self.checkpoint_path(checkpoint_id, new_path)
if os.path.isfile(old_cp_path):
self.log.debug(
"Renaming checkpoint %s -> %s",
old_cp_path,
new_cp_path,
)
with self.perm_to_403():
shutil.move(old_cp_path, new_cp_path)
def delete_checkpoint(self, checkpoint_id, path):
"""delete a file's checkpoint"""
path = path.strip('/')
cp_path = self.checkpoint_path(checkpoint_id, path)
if not os.path.isfile(cp_path):
self.no_such_checkpoint(path, checkpoint_id)
self.log.debug("unlinking %s", cp_path)
with self.perm_to_403():
os.unlink(cp_path)
def list_checkpoints(self, path):
"""list the checkpoints for a given file
This contents manager currently only supports one checkpoint per file.
"""
path = path.strip('/')
checkpoint_id = "checkpoint"
os_path = self.checkpoint_path(checkpoint_id, path)
if not os.path.isfile(os_path):
return []
else:
return [self.checkpoint_model(checkpoint_id, os_path)]
# Checkpoint-related utilities
def checkpoint_path(self, checkpoint_id, path):
"""find the path to a checkpoint"""
path = path.strip('/')
parent, name = ('/' + path).rsplit('/', 1)
parent = parent.strip('/')
basename, ext = os.path.splitext(name)
filename = u"{name}-{checkpoint_id}{ext}".format(
name=basename,
checkpoint_id=checkpoint_id,
ext=ext,
)
os_path = self._get_os_path(path=parent)
cp_dir = os.path.join(os_path, self.checkpoint_dir)
with self.perm_to_403():
ensure_dir_exists(cp_dir)
cp_path = os.path.join(cp_dir, filename)
return cp_path
def checkpoint_model(self, checkpoint_id, os_path):
"""construct the info dict for a given checkpoint"""
stats = os.stat(os_path)
last_modified = tz.utcfromtimestamp(stats.st_mtime)
info = dict(
id=checkpoint_id,
last_modified=last_modified,
)
return info
# Error Handling
def no_such_checkpoint(self, path, checkpoint_id):
raise HTTPError(
404,
u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id)
)
class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints):
"""
Local filesystem Checkpoints that works with any conforming
ContentsManager.
"""
def create_file_checkpoint(self, content, format, path):
"""Create a checkpoint from the current content of a file."""
path = path.strip('/')
# only the one checkpoint ID:
checkpoint_id = u"checkpoint"
os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
self.log.debug("creating checkpoint for %s", path)
with self.perm_to_403():
self._save_file(os_checkpoint_path, content, format=format)
# return the checkpoint info
return self.checkpoint_model(checkpoint_id, os_checkpoint_path)
def create_notebook_checkpoint(self, nb, path):
"""Create a checkpoint from the current content of a notebook."""
path = path.strip('/')
# only the one checkpoint ID:
checkpoint_id = u"checkpoint"
os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
self.log.debug("creating checkpoint for %s", path)
with self.perm_to_403():
self._save_notebook(os_checkpoint_path, nb)
# return the checkpoint info
return self.checkpoint_model(checkpoint_id, os_checkpoint_path)
def get_notebook_checkpoint(self, checkpoint_id, path):
"""Get a checkpoint for a notebook."""
path = path.strip('/')
self.log.info("restoring %s from checkpoint %s", path, checkpoint_id)
os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
if not os.path.isfile(os_checkpoint_path):
self.no_such_checkpoint(path, checkpoint_id)
return {
'type': 'notebook',
'content': self._read_notebook(
os_checkpoint_path,
as_version=4,
),
}
def get_file_checkpoint(self, checkpoint_id, path):
"""Get a checkpoint for a file."""
path = path.strip('/')
self.log.info("restoring %s from checkpoint %s", path, checkpoint_id)
os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
if not os.path.isfile(os_checkpoint_path):
self.no_such_checkpoint(path, checkpoint_id)
content, format = self._read_file(os_checkpoint_path, format=None)
return {
'type': 'file',
'content': content,
'format': format,
}
|
wolfram74/numerical_methods_iserles_notes
|
venv/lib/python2.7/site-packages/IPython/html/services/contents/filecheckpoints.py
|
Python
|
mit
| 6,954 |
import React, { PropTypes } from 'react'
import ActionDelete from 'material-ui/svg-icons/action/delete'
import { colors } from '/styles'
import moduleStyles from '/styles/fileTree'
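// Stateless delete-icon button for file-tree rows: renders a material-ui
// ActionDelete icon and forwards the click to the supplied handler.
// Usage sketch (editorial note; removeNode is a hypothetical callback):
//   <RemoveBtn onClick={() => removeNode(node.id)} />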
const RemoveBtn = ({ onClick }) => (
<ActionDelete
onClick={onClick}
style={moduleStyles.listIcon.base}
color={colors.light}
hoverColor={colors.hover.red} />
)
export default RemoveBtn
|
heshihao0813/registerScan
|
src/components/fileTree/RemoveBtn.js
|
JavaScript
|
mit
| 426 |
// Source : https://leetcode.com/problems/longest-substring-without-repeating-characters/
// Author : codeyu
// Date : 2016-09-20
/**********************************************************************************
*
* Given a string, find the length of the longest substring without repeating characters.
*
* Examples:
*
* Given "abcabcbb", the answer is "abc", which the length is 3.
*
* Given "bbbbb", the answer is "b", with the length of 1.
*
* Given "pwwkew", the answer is "wke", with the length of 3.
*
* Note that the answer must be a substring, "pwke" is a subsequence and not a substring.
*
**********************************************************************************/
using System.Collections.Generic;
using System;
namespace Algorithms
{
public class Solution003
{
public static int LengthOfLongestSubstring(string s)
{
var n = s.Length;
var charSet = new HashSet<char>();
int maxLength = 0, i = 0, j = 0;
while (i < n && j < n)
{
if (charSet.Add(s[j]))
{
j++;
maxLength = Math.Max(maxLength, j - i);
}
else
{
charSet.Remove(s[i]);
i++;
}
}
return maxLength;
}
}
}
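// Example usage (editorial sketch, not part of the original file) — the expected
// results follow the examples given in the problem statement above:
//   Algorithms.Solution003.LengthOfLongestSubstring("abcabcbb") -> 3  ("abc")
//   Algorithms.Solution003.LengthOfLongestSubstring("bbbbb")    -> 1  ("b")
//   Algorithms.Solution003.LengthOfLongestSubstring("pwwkew")   -> 3  ("wke")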
|
AzureLocalizationTeam/leetcode
|
Algorithms/LengthOfLongestSubstring.cs
|
C#
|
mit
| 1,408 |
using System.Collections.ObjectModel;
namespace SimpleBackgroundUploadWebAPI.Areas.HelpPage.ModelDescriptions
{
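    /// <summary>
    /// Help-page model description for a complex type; <see cref="Properties"/>
    /// collects the descriptions of the type's members.
    /// (Doc comment added editorially, inferred from the code below.)
    /// </summary>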
public class ComplexTypeModelDescription : ModelDescription
{
public ComplexTypeModelDescription()
{
Properties = new Collection<ParameterDescription>();
}
public Collection<ParameterDescription> Properties { get; private set; }
}
}
|
dannycabrera/SimpleBackgroundUpload
|
SimpleBackgroundUploadWebAPI/SimpleBackgroundUploadWebAPI/Areas/HelpPage/ModelDescriptions/ComplexTypeModelDescription.cs
|
C#
|
mit
| 402 |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.4"/>
<title>Member List</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<!-- end header part -->
<!-- Generated by Doxygen 1.8.4 -->
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li><a href="pages.html"><span>Related Pages</span></a></li>
<li><a href="modules.html"><span>Modules</span></a></li>
<li><a href="namespaces.html"><span>Namespaces</span></a></li>
<li class="current"><a href="annotated.html"><span>Classes</span></a></li>
<li><a href="files.html"><span>Files</span></a></li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="annotated.html"><span>Class List</span></a></li>
<li><a href="hierarchy.html"><span>Class Hierarchy</span></a></li>
<li><a href="functions.html"><span>Class Members</span></a></li>
</ul>
</div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="a00237.html">tbb</a></li><li class="navelem"><b>interface5</b></li><li class="navelem"><a class="el" href="a00046.html">concurrent_unordered_multimap</a></li> </ul>
</div>
</div><!-- top -->
<div class="header">
<div class="headertitle">
<div class="title">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator > Member List</div> </div>
</div><!--header-->
<div class="contents">
<p>This is the complete list of members for <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>, including all inherited members.</p>
<table class="directory">
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>allocator_type</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>concurrent_unordered_multimap</b>(size_type n_of_buckets=base_type::initial_bucket_number, const hasher &_Hasher=hasher(), const key_equal &_Key_equality=key_equal(), const allocator_type &a=allocator_type()) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span><span class="mlabel">explicit</span></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>concurrent_unordered_multimap</b>(const Allocator &a) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span><span class="mlabel">explicit</span></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>concurrent_unordered_multimap</b>(Iterator first, Iterator last, size_type n_of_buckets=base_type::initial_bucket_number, const hasher &_Hasher=hasher(), const key_equal &_Key_equality=key_equal(), const allocator_type &a=allocator_type()) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr class="even"><td class="entry"><a class="el" href="a00046.html#a5957d29e5fa9f8c53538de3f7a41ebc9">concurrent_unordered_multimap</a>(std::initializer_list< value_type > il, size_type n_of_buckets=base_type::initial_bucket_number, const hasher &_Hasher=hasher(), const key_equal &_Key_equality=key_equal(), const allocator_type &a=allocator_type())</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>concurrent_unordered_multimap</b>(const concurrent_unordered_multimap &table) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>concurrent_unordered_multimap</b>(concurrent_unordered_multimap &&table) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>concurrent_unordered_multimap</b>(concurrent_unordered_multimap &&table, const Allocator &a) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>concurrent_unordered_multimap</b>(const concurrent_unordered_multimap &table, const Allocator &a) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>const_iterator</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>const_local_iterator</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>const_pointer</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>const_reference</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>difference_type</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>hasher</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>iterator</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>key_compare</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>key_equal</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>key_type</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>local_iterator</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>mapped_type</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>operator=</b>(const concurrent_unordered_multimap &table) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>operator=</b>(concurrent_unordered_multimap &&table) (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>pointer</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>reference</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>size_type</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>value_type</b> typedef (defined in <a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a>)</td><td class="entry"><a class="el" href="a00046.html">tbb::interface5::concurrent_unordered_multimap< Key, T, Hasher, Key_equality, Allocator ></a></td><td class="entry"></td></tr>
</table></div><!-- contents -->
<hr>
<p></p>
Copyright © 2005-2017 Intel Corporation. All Rights Reserved.
<p></p>
Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are
registered trademarks or trademarks of Intel Corporation or its
subsidiaries in the United States and other countries.
<p></p>
* Other names and brands may be claimed as the property of others.
|
bachelorwhc/Valkyrie
|
third_party/tbb/doc/html/a00371.html
|
HTML
|
mit
| 15,116 |