text
stringlengths 4
5.48M
| meta
stringlengths 14
6.54k
|
---|---|
// Base fixture for panel browser tests. Installs a mock display settings
// provider (owned by PanelManager) so tests can simulate work-area changes
// and auto-hiding desktop bars, and provides helpers to create panels and
// wait for panel state transitions.
class BasePanelBrowserTest : public InProcessBrowserTest {
public:
// Abstract mock of the display environment. Tests drive it through the
// setters below instead of relying on the real OS display configuration.
class MockDisplaySettingsProvider : public DisplaySettingsProvider {
public:
explicit MockDisplaySettingsProvider(Observer* observer)
: DisplaySettingsProvider(observer) {
}
virtual ~MockDisplaySettingsProvider() { }
// Overrides the work area reported to panels.
virtual void SetWorkArea(const gfx::Rect& work_area) = 0;
// Simulates enabling/disabling an auto-hiding desktop bar (e.g. taskbar)
// of the given thickness on the given screen edge.
virtual void EnableAutoHidingDesktopBar(DesktopBarAlignment alignment,
bool enabled,
int thickness) = 0;
virtual void SetDesktopBarVisibility(DesktopBarAlignment alignment,
DesktopBarVisibility visibility) = 0;
virtual void SetDesktopBarThickness(DesktopBarAlignment alignment,
int thickness) = 0;
};
BasePanelBrowserTest();
virtual ~BasePanelBrowserTest();
// Linux bots use icewm which activate windows in ways that break
// certain panel tests. Skip those tests when running on the bots.
// We do not disable the tests to make it easy for developers to run
// them locally.
bool SkipTestIfIceWM();
// Gnome running compiz refuses to activate a window that was initially
// created as inactive, causing certain panel tests to fail. These tests
// pass fine on the bots, but fail for developers as Gnome running compiz
// is the typical linux dev machine configuration. We do not disable the
// tests to ensure we still have coverage on the bots.
bool SkipTestIfCompizWM();
virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE;
virtual void SetUpOnMainThread() OVERRIDE;
protected:
// Whether a panel is shown (or expected to end up) active or inactive.
enum ActiveState { SHOW_AS_ACTIVE, SHOW_AS_INACTIVE };
// Parameters for CreatePanelWithParams(). By default the helper waits for
// the panel to be fully created and expects its final active state to
// match the requested show flag.
struct CreatePanelParams {
std::string name;
gfx::Rect bounds;
ActiveState show_flag;
GURL url;
bool wait_for_fully_created;
ActiveState expected_active_state;
CreatePanelParams(const std::string& name,
const gfx::Rect& bounds,
ActiveState show_flag)
: name(name),
bounds(bounds),
show_flag(show_flag),
wait_for_fully_created(true),
expected_active_state(show_flag) {
}
};
// Panel creation helpers; each returns the created panel.
Panel* CreatePanelWithParams(const CreatePanelParams& params);
Panel* CreatePanelWithBounds(const std::string& panel_name,
const gfx::Rect& bounds);
Panel* CreatePanel(const std::string& panel_name);
Panel* CreateDockedPanel(const std::string& name, const gfx::Rect& bounds);
Panel* CreateDetachedPanel(const std::string& name, const gfx::Rect& bounds);
// The caller should have already created enough docked panels to trigger
// overflow.
Panel* CreateOverflowPanel(const std::string& name, const gfx::Rect& bounds);
// Synchronization helpers: block until the given panel event/state occurs.
void WaitForPanelAdded(Panel* panel);
void WaitForPanelRemoved(Panel* panel);
void WaitForPanelActiveState(Panel* panel, ActiveState state);
void WaitForWindowSizeAvailable(Panel* panel);
void WaitForBoundsAnimationFinished(Panel* panel);
void WaitForLayoutModeChanged(Panel* panel, PanelStrip::Type layout_type);
void WaitForExpansionStateChanged(Panel* panel,
Panel::ExpansionState expansion_state);
void CreateTestTabContents(Browser* browser);
scoped_refptr<Extension> CreateExtension(const FilePath::StringType& path,
Extension::Location location,
const DictionaryValue& extra_value);
static void MoveMouse(const gfx::Point& position);
void CloseWindowAndWait(Browser* browser);
// Builds a unique panel name from an index.
static std::string MakePanelName(int index);
gfx::Rect GetTestingWorkArea() const;
void SetTestingWorkArea(const gfx::Rect& work_area);
MockDisplaySettingsProvider* mock_display_settings_provider() const {
return mock_display_settings_provider_;
}
static const FilePath::CharType* kTestDir;
private:
// Passed to and owned by PanelManager.
MockDisplaySettingsProvider* mock_display_settings_provider_;
};
#endif // CHROME_BROWSER_UI_PANELS_BASE_PANEL_BROWSER_TEST_H_
| {'content_hash': '25f507639d6a082843ab51e0915ad544', 'timestamp': '', 'source': 'github', 'line_count': 104, 'max_line_length': 79, 'avg_line_length': 39.81730769230769, 'alnum_prop': 0.6863076551557595, 'repo_name': 'gavinp/chromium', 'id': '7cd500c6e4708e79a47a96924ca785b8f659414f', 'size': '4792', 'binary': False, 'copies': '1', 'ref': 'refs/heads/trunk', 'path': 'chrome/browser/ui/panels/base_panel_browser_test.h', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Assembly', 'bytes': '1178292'}, {'name': 'C', 'bytes': '72353788'}, {'name': 'C++', 'bytes': '117593783'}, {'name': 'F#', 'bytes': '381'}, {'name': 'Go', 'bytes': '10440'}, {'name': 'Java', 'bytes': '24087'}, {'name': 'JavaScript', 'bytes': '8781314'}, {'name': 'Objective-C', 'bytes': '5340290'}, {'name': 'PHP', 'bytes': '97796'}, {'name': 'Perl', 'bytes': '918286'}, {'name': 'Python', 'bytes': '5942009'}, {'name': 'R', 'bytes': '524'}, {'name': 'Shell', 'bytes': '4149832'}, {'name': 'Tcl', 'bytes': '255109'}]} |
package levenshtein
import "unicode/utf8"
// ComputeDistance computes the Levenshtein distance between the two
// strings passed as arguments: the minimum number of single-rune
// insertions, deletions and substitutions needed to turn a into b.
//
// Works on runes (Unicode code points) but does not normalize
// the input strings. See https://blog.golang.org/normalization
// and the golang.org/x/text/unicode/norm package.
func ComputeDistance(a, b string) int {
// Fast paths: distance to/from the empty string is the other string's
// rune count, and identical strings have distance 0.
if len(a) == 0 {
return utf8.RuneCountInString(b)
}
if len(b) == 0 {
return utf8.RuneCountInString(a)
}
if a == b {
return 0
}
// We need to convert to []rune if the strings are non-ascii.
// This could be avoided by using utf8.RuneCountInString
// and then doing some juggling with rune indices.
// The primary challenge is keeping track of the previous rune.
// With a range loop, it's not that easy. And with a for-loop
// we need to keep track of the inter-rune width using utf8.DecodeRuneInString
s1 := []rune(a)
s2 := []rune(b)
// swap to save some memory O(min(a,b)) instead of O(a)
if len(s1) > len(s2) {
s1, s2 = s2, s1
}
lenS1 := len(s1)
lenS2 := len(s2)
// init the row: x[j] is the distance from the empty prefix of s2 to the
// j-rune prefix of s1, i.e. j deletions.
x := make([]int, lenS1+1)
// we start from 1 because index 0 is already 0.
for i := 1; i < len(x); i++ {
x[i] = i
}
// make a dummy bounds check to prevent the 2 bounds check down below.
// The one inside the loop is particularly costly.
_ = x[lenS1]
// fill in the rest: single-row dynamic programming. `prev` plays the role
// of the previous column of the current row; x[j-1] is written one step
// late so it still holds the diagonal value when cell j is computed.
for i := 1; i <= lenS2; i++ {
prev := i
var current int
for j := 1; j <= lenS1; j++ {
if s2[i-1] == s1[j-1] {
current = x[j-1] // match
} else {
// min of substitution, insertion and deletion costs
current = min(min(x[j-1]+1, prev+1), x[j]+1)
}
x[j-1] = prev
prev = current
}
x[lenS1] = prev
}
return x[lenS1]
}
// min returns the smaller of its two integer arguments.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
| {'content_hash': '81c25a269ae9b34dd3e7e05e5cb78020', 'timestamp': '', 'source': 'github', 'line_count': 73, 'max_line_length': 79, 'avg_line_length': 24.19178082191781, 'alnum_prop': 0.6404303510758776, 'repo_name': 'EnMasseProject/enmasse', 'id': '25be373a5f8bdd0f11d1225090d1b44dc8676043', 'size': '1942', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'vendor/github.com/agnivade/levenshtein/levenshtein.go', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '1040'}, {'name': 'Dockerfile', 'bytes': '5215'}, {'name': 'Go', 'bytes': '720426'}, {'name': 'Groovy', 'bytes': '9065'}, {'name': 'HTML', 'bytes': '4176'}, {'name': 'Java', 'bytes': '3876076'}, {'name': 'JavaScript', 'bytes': '1013704'}, {'name': 'Makefile', 'bytes': '20919'}, {'name': 'Python', 'bytes': '6730'}, {'name': 'Ragel', 'bytes': '3778'}, {'name': 'Shell', 'bytes': '71076'}, {'name': 'TypeScript', 'bytes': '515844'}, {'name': 'XSLT', 'bytes': '11077'}, {'name': 'Yacc', 'bytes': '5306'}]} |
* [Finding the Best Match With a Top-N Query](http://blog.fatalmind.com/2010/09/29/finding-the-best-match-with-a-top-n-query/)
* [SQL Indexing and Tuning e-Book](http://use-the-index-luke.com/)
| {'content_hash': 'f118368ea41e712fab6d3104838c6af4', 'timestamp': '', 'source': 'github', 'line_count': 2, 'max_line_length': 126, 'avg_line_length': 97.0, 'alnum_prop': 0.7268041237113402, 'repo_name': 'andreia/webdev-references', 'id': '217623cb393b957f878318c023135391ac173cdf', 'size': '224', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'database/db.md', 'mode': '33188', 'license': 'mit', 'language': []} |
namespace sync_driver {
class LocalDeviceInfoProvider;
class SyncClient;
}
namespace sync_driver_v2 {
// DataTypeController for DEVICE_INFO model type. Runs on the UI thread
// (derives from UIModelTypeController) and keeps a subscription to the
// local device info provider.
class DeviceInfoModelTypeController : public UIModelTypeController {
 public:
  // |local_device_info_provider| must outlive this controller; it is held
  // as a raw const pointer (not owned).
DeviceInfoModelTypeController(
const scoped_refptr<base::SingleThreadTaskRunner>& ui_thread,
const base::Closure& error_callback,
sync_driver::SyncClient* sync_client,
sync_driver::LocalDeviceInfoProvider* local_device_info_provider);
 private:
~DeviceInfoModelTypeController() override;
// Not owned; see constructor comment.
sync_driver::LocalDeviceInfoProvider* const local_device_info_provider_;
// Keeps the provider callback registration alive for our lifetime.
scoped_ptr<sync_driver::LocalDeviceInfoProvider::Subscription> subscription_;
DISALLOW_COPY_AND_ASSIGN(DeviceInfoModelTypeController);
};
} // namespace sync_driver_v2
#endif // COMPONENTS_SYNC_DRIVER_DEVICE_INFO_MODEL_TYPE_CONTROLLER_H_
| {'content_hash': '4fcfea6e3d9020309ff19e12167918a9', 'timestamp': '', 'source': 'github', 'line_count': 27, 'max_line_length': 79, 'avg_line_length': 32.25925925925926, 'alnum_prop': 0.78300803673938, 'repo_name': 'ds-hwang/chromium-crosswalk', 'id': 'fb189589318551bbfb58d54b00ecfa743c76b082', 'size': '1335', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'components/sync_driver/device_info_model_type_controller.h', 'mode': '33188', 'license': 'bsd-3-clause', 'language': []} |
// Gesture recognizer that fires on touch-down: it moves straight to the
// "ended" state as soon as any touch begins, so the attached action runs
// immediately without waiting for the touch to finish.
@implementation MSTouchesBeganGestureRecognizer
- (void) reset
{
[super reset];
}
- (void) touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
[super touchesBegan:touches withEvent:event];
// Recognize immediately on the first touch.
self.state = UIGestureRecognizerStateEnded;
}
@end
| {'content_hash': '92510d4b8b054bbc5eef1a7bc1cf9d8d', 'timestamp': '', 'source': 'github', 'line_count': 14, 'max_line_length': 65, 'avg_line_length': 18.285714285714285, 'alnum_prop': 0.75, 'repo_name': 'e7mac/MSAutoScroll', 'id': '37b7dc0cce45a9e010107e9807a4747d0d4dd344', 'size': '457', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Classes/MSTouchesBeganGestureRecognizer.m', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Objective-C', 'bytes': '16335'}, {'name': 'Ruby', 'bytes': '4610'}]} |
Tools for statistics write in go
| {'content_hash': 'bf230cecc62f57cd89a2962a5176ef05', 'timestamp': '', 'source': 'github', 'line_count': 1, 'max_line_length': 32, 'avg_line_length': 33.0, 'alnum_prop': 0.8181818181818182, 'repo_name': 'ThibaultRiviere/stats', 'id': '951107e0bcbd4d873a4c8f193c75ecfd5fe5a841', 'size': '41', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Go', 'bytes': '10444'}]} |
!
! Distributed under the OSI-approved Apache License, Version 2.0. See
! accompanying file Copyright.txt for details.
!
! adios2_variable_min_mod.f90 : ADIOS2 Fortran bindings for overloaded
! adios2_variable_min subroutine
! Created on: Nov 15, 2018
! Author: William F Godoy [email protected]
!
! Provides the generic adios2_variable_min interface: retrieves the minimum
! value of an ADIOS2 variable into a `minimum` argument of any supported
! numeric type/kind. Each specific routine first checks that the variable's
! declared type matches the argument kind, then forwards to the C binding
! adios2_variable_min_f2c. On type mismatch, ierr is set nonzero and the
! C call is skipped.
module adios2_variable_min_mod
use adios2_parameters_mod
use adios2_variable_mod
implicit none
! Generic name; resolves to one specific routine per type/kind below.
interface adios2_variable_min
module procedure adios2_variable_min_real
module procedure adios2_variable_min_dp
module procedure adios2_variable_min_complex
module procedure adios2_variable_min_complex_dp
module procedure adios2_variable_min_integer1
module procedure adios2_variable_min_integer2
module procedure adios2_variable_min_integer4
module procedure adios2_variable_min_integer8
end interface
! C-interop entry point implemented in the ADIOS2 C bindings layer.
external adios2_variable_min_f2c
contains
! Minimum of a single-precision real variable.
subroutine adios2_variable_min_real(minimum, variable, ierr)
real, intent(out) :: minimum
type(adios2_variable), intent(in) :: variable
integer, intent(out) :: ierr
call adios2_variable_check_type(variable, adios2_type_real, &
'variable_min', ierr)
if (ierr == 0) then
call adios2_variable_min_f2c(minimum, variable%f2c, ierr)
end if
end subroutine
! Minimum of a double-precision real variable.
subroutine adios2_variable_min_dp(minimum, variable, ierr)
real(kind=8), intent(out) :: minimum
type(adios2_variable), intent(in) :: variable
integer, intent(out) :: ierr
call adios2_variable_check_type(variable, adios2_type_dp, &
'variable_min', ierr)
if (ierr == 0) then
call adios2_variable_min_f2c(minimum, variable%f2c, ierr)
end if
end subroutine
! Minimum of a single-precision complex variable.
subroutine adios2_variable_min_complex(minimum, variable, ierr)
complex, intent(out) :: minimum
type(adios2_variable), intent(in) :: variable
integer, intent(out) :: ierr
call adios2_variable_check_type(variable, adios2_type_complex, &
'variable_min', ierr)
if (ierr == 0) then
call adios2_variable_min_f2c(minimum, variable%f2c, ierr)
end if
end subroutine
! Minimum of a double-precision complex variable.
subroutine adios2_variable_min_complex_dp(minimum, variable, ierr)
complex(kind=8), intent(out) :: minimum
type(adios2_variable), intent(in) :: variable
integer, intent(out) :: ierr
call adios2_variable_check_type(variable, adios2_type_complex_dp, &
'variable_min', ierr)
if (ierr == 0) then
call adios2_variable_min_f2c(minimum, variable%f2c, ierr)
end if
end subroutine
! Minimum of a 1-byte integer variable.
subroutine adios2_variable_min_integer1(minimum, variable, ierr)
integer(kind=1), intent(out) :: minimum
type(adios2_variable), intent(in) :: variable
integer, intent(out) :: ierr
call adios2_variable_check_type(variable, adios2_type_integer1, &
'variable_min', ierr)
if (ierr == 0) then
call adios2_variable_min_f2c(minimum, variable%f2c, ierr)
end if
end subroutine
! Minimum of a 2-byte integer variable.
subroutine adios2_variable_min_integer2(minimum, variable, ierr)
integer(kind=2), intent(out) :: minimum
type(adios2_variable), intent(in) :: variable
integer, intent(out) :: ierr
call adios2_variable_check_type(variable, adios2_type_integer2, &
'variable_min', ierr)
if (ierr == 0) then
call adios2_variable_min_f2c(minimum, variable%f2c, ierr)
end if
end subroutine
! Minimum of a 4-byte integer variable.
subroutine adios2_variable_min_integer4(minimum, variable, ierr)
integer(kind=4), intent(out) :: minimum
type(adios2_variable), intent(in) :: variable
integer, intent(out) :: ierr
call adios2_variable_check_type(variable, adios2_type_integer4, &
'variable_min', ierr)
if (ierr == 0) then
call adios2_variable_min_f2c(minimum, variable%f2c, ierr)
end if
end subroutine
! Minimum of an 8-byte integer variable.
subroutine adios2_variable_min_integer8(minimum, variable, ierr)
integer(kind=8), intent(out) :: minimum
type(adios2_variable), intent(in) :: variable
integer, intent(out) :: ierr
call adios2_variable_check_type(variable, adios2_type_integer8, &
'variable_min', ierr)
if (ierr == 0) then
call adios2_variable_min_f2c(minimum, variable%f2c, ierr)
end if
end subroutine
end module
| {'content_hash': '187c09235797bc17dd89b23dddd20f42', 'timestamp': '', 'source': 'github', 'line_count': 136, 'max_line_length': 75, 'avg_line_length': 35.11764705882353, 'alnum_prop': 0.606574539363484, 'repo_name': 'ornladios/ADIOS2', 'id': 'fe156e5e42a877259aa45565d7e16998d609a9fb', 'size': '4776', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'bindings/Fortran/modules/adios2_variable_min_mod.f90', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '854523'}, {'name': 'C++', 'bytes': '6522951'}, {'name': 'CMake', 'bytes': '758150'}, {'name': 'Cuda', 'bytes': '2207'}, {'name': 'Dockerfile', 'bytes': '40776'}, {'name': 'Fortran', 'bytes': '631183'}, {'name': 'MATLAB', 'bytes': '26685'}, {'name': 'Makefile', 'bytes': '8099'}, {'name': 'PowerShell', 'bytes': '8948'}, {'name': 'Python', 'bytes': '224209'}, {'name': 'Roff', 'bytes': '1214'}, {'name': 'Shell', 'bytes': '92677'}, {'name': 'Tcl', 'bytes': '423'}]} |
//
// NSString+Validate.m
// QQMusic
//
// Created by xwmedia01 on 2017/3/17.
// Copyright © 2017年 xwmedia01. All rights reserved.
//
#import "NSString+Validate.h"
// Regex-based validators for common Chinese-market input formats.
// All methods return YES only when the whole string matches the pattern
// (NSPredicate MATCHES anchors the regex to the full string).
@implementation NSString (Validate)
// Returns YES if `mobile` is a correctly formatted mainland-China mobile
// number. Spaces are stripped first; the number must be exactly 11 digits
// and fall into a known carrier segment.
// NOTE(review): the carrier segment lists are hard-coded and may be out of
// date as new number ranges are allocated — verify before relying on them.
+ (BOOL)valiMobile:(NSString *)mobile
{
mobile = [mobile stringByReplacingOccurrencesOfString:@" " withString:@""];
if (mobile.length != 11)
{
return NO;
}else{
/**
* China Mobile number-segment regex
*/
NSString *CM_NUM = @"^((13[4-9])|(147)|(15[0-2,7-9])|(178)|(18[2-4,7-8]))\\d{8}|(1705)\\d{7}$";
/**
* China Unicom number-segment regex
*/
NSString *CU_NUM = @"^((13[0-2])|(145)|(15[5-6])|(176)|(18[5,6]))\\d{8}|(1709)\\d{7}$";
/**
* China Telecom number-segment regex
*/
NSString *CT_NUM = @"^((133)|(153)|(177)|(18[0,1,9]))\\d{8}$";
NSPredicate *pred1 = [NSPredicate predicateWithFormat:@"SELF MATCHES %@", CM_NUM];
BOOL isMatch1 = [pred1 evaluateWithObject:mobile];
NSPredicate *pred2 = [NSPredicate predicateWithFormat:@"SELF MATCHES %@", CU_NUM];
BOOL isMatch2 = [pred2 evaluateWithObject:mobile];
NSPredicate *pred3 = [NSPredicate predicateWithFormat:@"SELF MATCHES %@", CT_NUM];
BOOL isMatch3 = [pred3 evaluateWithObject:mobile];
// Valid if it belongs to any of the three carriers.
if (isMatch1 || isMatch2 || isMatch3) {
return YES;
}else{
return NO;
}
}
}
// YES if `name` is 6-20 ASCII letters/digits.
+ (BOOL) validateUserName:(NSString *)name
{
NSString *userNameRegex = @"^[A-Za-z0-9]{6,20}+$";
NSPredicate *userNamePredicate = [NSPredicate predicateWithFormat:@"SELF MATCHES %@",userNameRegex];
BOOL B = [userNamePredicate evaluateWithObject:name];
return B;
}
// YES if `email` looks like a simple user@domain.tld address
// (TLD restricted to 2-4 letters by this pattern).
+ (BOOL) validateEmail:(NSString *)email
{
NSString *emailRegex = @"[A-Z0-9a-z._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,4}";
NSPredicate *emailTest = [NSPredicate predicateWithFormat:@"SELF MATCHES %@", emailRegex];
return [emailTest evaluateWithObject:email];
}
// YES if `passWord` is 6-20 ASCII letters/digits.
+ (BOOL) validatePassword:(NSString *)passWord
{
NSString *passWordRegex = @"^[a-zA-Z0-9]{6,20}+$";
NSPredicate *passWordPredicate = [NSPredicate predicateWithFormat:@"SELF MATCHES %@",passWordRegex];
return [passWordPredicate evaluateWithObject:passWord];
}
// YES if `identityCard` has the shape of a Chinese resident ID:
// 15 or 18 characters, where the last may be a digit or x/X.
+ (BOOL) validateIdentityCard: (NSString *)identityCard
{
BOOL flag;
if (identityCard.length <= 0) {
flag = NO;
return flag;
}
NSString *regex2 = @"^(\\d{14}|\\d{17})(\\d|[xX])$";
NSPredicate *identityCardPredicate = [NSPredicate predicateWithFormat:@"SELF MATCHES %@",regex2];
return [identityCardPredicate evaluateWithObject:identityCard];
}
// YES if `nickname` is 4-8 CJK (Chinese) characters.
+ (BOOL) validateNickname:(NSString *)nickname
{
NSString *nicknameRegex = @"^[\u4e00-\u9fa5]{4,8}$";
NSPredicate *passWordPredicate = [NSPredicate predicateWithFormat:@"SELF MATCHES %@",nicknameRegex];
return [passWordPredicate evaluateWithObject:nickname];
}
// YES if `carNo` matches the Chinese license-plate shape:
// one CJK character (province), one letter, then 4 alphanumerics and a
// final alphanumeric-or-CJK character.
// NOTE(review): the NSLog below looks like leftover debug output;
// consider removing it.
+ (BOOL) validateCarNo:(NSString *)carNo
{
NSString *carRegex = @"^[\u4e00-\u9fa5]{1}[a-zA-Z]{1}[a-zA-Z_0-9]{4}[a-zA-Z_0-9_\u4e00-\u9fa5]$";
NSPredicate *carTest = [NSPredicate predicateWithFormat:@"SELF MATCHES %@",carRegex];
NSLog(@"carTest is %@",carTest);
return [carTest evaluateWithObject:carNo];
}
// YES if `CarType` consists solely of CJK characters.
+ (BOOL) validateCarType:(NSString *)CarType
{
NSString *CarTypeRegex = @"^[\u4E00-\u9FFF]+$";
NSPredicate *carTest = [NSPredicate predicateWithFormat:@"SELF MATCHES %@",CarTypeRegex];
return [carTest evaluateWithObject:CarType];
}
@end
| {'content_hash': 'f4c058ec256cdc7056dcf51551e92d9a', 'timestamp': '', 'source': 'github', 'line_count': 107, 'max_line_length': 104, 'avg_line_length': 32.308411214953274, 'alnum_prop': 0.6259762800115707, 'repo_name': 'xsd0720/QQMusic', 'id': '58b7f04e9412fecdd99d0374aa234d822a3e4527', 'size': '3538', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'QQMusic/Category/NSString+Validate.m', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '40695'}, {'name': 'Objective-C', 'bytes': '1584063'}]} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Title-bar background. Declared as a vertical (angle 270) gradient,
     but start/center/end colors are all #1a2933, so it renders as a
     solid color rectangle. -->
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle" >
<gradient
android:angle="270"
android:centerColor="#1a2933"
android:endColor="#1a2933"
android:startColor="#1a2933" />
</shape>
| {'content_hash': '4ab4fb7b759aa096c19819bef05cc1e5', 'timestamp': '', 'source': 'github', 'line_count': 9, 'max_line_length': 65, 'avg_line_length': 33.44444444444444, 'alnum_prop': 0.6378737541528239, 'repo_name': 'RyanTech/ubivearound_2', 'id': 'c11611de82f6719842c349e30f7601b53321c585', 'size': '301', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'res/drawable/bg_title.xml', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
#include "../glx/hardext.h"
#include "array.h"
#include "enum_info.h"
#include "fpe.h"
#include "gl4es.h"
#include "gles.h"
#include "glstate.h"
#include "init.h"
#include "list.h"
#include "loader.h"
#include "render.h"
//#define DEBUG
#ifdef DEBUG
#define DBG(a) a
#else
#define DBG(a)
#endif
// Returns GL_TRUE when the arrays cached on the current (non-default) VAO
// can be reused for a draw of `count` vertices: the cache must cover at
// least `count` elements, and for every attribute the cached enabled flag
// and the saved vertexattrib state must exactly match the current client
// state (memcmp of the whole vertexattrib_t).
static GLboolean is_cache_compatible(GLsizei count) {
// T2: enabled flags must agree; if the attribute is enabled, the cached
// pointer/format/stride state must be bit-identical to the live one.
#define T2(AA, A, B) \
if(glstate->vao->AA!=glstate->vao->B.enabled) return GL_FALSE; \
if(glstate->vao->B.enabled && memcmp(&glstate->vao->vertexattrib[A], &glstate->vao->B.state, sizeof(vertexattrib_t))) return GL_FALSE;
#define TEST(A,B) T2(vertexattrib[A].enabled, A, B)
#define TESTA(A,B,I) T2(vertexattrib[A+i].enabled, A+i, B[i])
// The default VAO is never cached.
if(glstate->vao == glstate->defaultvao) return GL_FALSE;
if(count > glstate->vao->cache_count) return GL_FALSE;
TEST(ATT_VERTEX, vert)
TEST(ATT_COLOR, color)
TEST(ATT_SECONDARY, secondary)
TEST(ATT_FOGCOORD, fog)
TEST(ATT_NORMAL, normal)
for (int i=0; i<hardext.maxtex; i++) {
TESTA(ATT_MULTITEXCOORD0,tex,i)
}
#undef TESTA
#undef TEST
#undef T2
return GL_TRUE;
}
// Returns GL_TRUE when `list` holds exactly the same set of arrays as the
// attributes currently enabled on the VAO (presence of a list array must
// match the attribute's enabled flag, for every attribute). Also rejects
// lists whose post-draw color/normal flags are set without the matching
// array being present.
GLboolean is_list_compatible(renderlist_t* list) {
// T2: an attribute is enabled iff the list carries the matching array.
#define T2(AA, A, B) \
if(glstate->vao->AA!=(list->B!=NULL)) return GL_FALSE;
#define TEST(A,B) T2(vertexattrib[A].enabled, A, B)
#define TESTA(A,B,I) T2(vertexattrib[A+i].enabled, A+i, B[i])
if(list->post_color && !list->color) return GL_FALSE;
if(list->post_normal && !list->normal) return GL_FALSE;
TEST(ATT_VERTEX, vert)
TEST(ATT_COLOR, color)
TEST(ATT_SECONDARY, secondary)
TEST(ATT_FOGCOORD, fogcoord)
TEST(ATT_NORMAL, normal)
for (int i=0; i<hardext.maxtex; i++) {
TESTA(ATT_MULTITEXCOORD0,tex,i)
}
#undef TESTA
#undef TEST
#undef T2
return GL_TRUE;
}
// Snapshot the currently enabled client-side vertex arrays (elements
// [skip, count) of each) into a renderlist_t so the geometry can be
// replayed later. Allocates a fresh list when `list` is NULL. When a
// non-default VAO is bound the copies are also cached on the VAO
// (reference-counted via shared_arrays) so the next compatible draw can
// alias the cached copy instead of copying again.
renderlist_t *arrays_to_renderlist(renderlist_t *list, GLenum mode,
GLsizei skip, GLsizei count) {
if (! list)
list = alloc_renderlist();
DBG(LOGD("arrary_to_renderlist, compiling=%d, skip=%d, count=%d\n", glstate->list.compiling, skip, count);)
list->mode = mode;
list->mode_init = mode;
list->mode_dimension = rendermode_dimensions(mode);
list->len = count-skip;
list->cap = count-skip;
// check cache if any
if(glstate->vao->shared_arrays) {
// drop the cached copy if the attribute state changed or it is too short
if (!is_cache_compatible(count))
VaoSharedClear(glstate->vao);
}
if(glstate->vao->shared_arrays) {
// Cache hit: alias the VAO's cached arrays (offset by `skip` elements)
// and bump the share count; nothing is copied.
#define OP(A, N) (A)?(A+skip*N):NULL
list->vert = OP(glstate->vao->vert.ptr,4);
list->color = OP(glstate->vao->color.ptr,4);
list->secondary = OP(glstate->vao->secondary.ptr,4);
list->fogcoord = OP(glstate->vao->fog.ptr, 1);
list->normal = OP(glstate->vao->normal.ptr,3);
for (int i=0; i<hardext.maxtex; i++)
list->tex[i] = OP(glstate->vao->tex[i].ptr,4);
#undef OP
list->shared_arrays = glstate->vao->shared_arrays;
(*glstate->vao->shared_arrays)++;
} else {
if(!globals4es.novaocache && glstate->vao != glstate->defaultvao) {
// prepare a vao cache object
list->shared_arrays = glstate->vao->shared_arrays = (int*)malloc(sizeof(int));
*glstate->vao->shared_arrays = 2; // already shared between glstate & list
// G2: record each attribute's enabled flag and, when enabled, its full
// client state, so is_cache_compatible() can validate reuse later.
#define G2(AA, A, B) \
glstate->vao->B.enabled = glstate->vao->vertexattrib[AA].enabled; \
if (glstate->vao->B.enabled) memcpy(&glstate->vao->B.state, &glstate->vao->vertexattrib[A], sizeof(vertexattrib_t));
#define GO(A,B) G2(A, A, B)
#define GOA(A,B,I) G2(A+i, A+i, B[i])
GO(ATT_VERTEX, vert)
GO(ATT_COLOR, color)
GO(ATT_SECONDARY, secondary)
GO(ATT_FOGCOORD, fog)
GO(ATT_NORMAL, normal)
for (int i=0; i<hardext.maxtex; i++) {
GOA(ATT_MULTITEXCOORD0,tex,i)
}
glstate->vao->cache_count = count;
#undef GOA
#undef GO
#undef G2
}
// For each enabled attribute: when caching, copy the full [0, count)
// range into the VAO cache and point the list at the skip offset;
// otherwise copy just [skip, count) straight into the list.
if (glstate->vao->vertexattrib[ATT_VERTEX].enabled) {
if(glstate->vao->shared_arrays) {
glstate->vao->vert.ptr = copy_gl_pointer_tex(&glstate->vao->vertexattrib[ATT_VERTEX], 4, 0, count);
list->vert = glstate->vao->vert.ptr + 4*skip;
} else
list->vert = copy_gl_pointer_tex(&glstate->vao->vertexattrib[ATT_VERTEX], 4, skip, count);
}
if (glstate->vao->vertexattrib[ATT_COLOR].enabled) {
// GL_BGRA-sized colors need channel reordering during the copy.
if(glstate->vao->shared_arrays) {
if(glstate->vao->vertexattrib[ATT_COLOR].size==GL_BGRA)
glstate->vao->color.ptr = copy_gl_pointer_color_bgra(glstate->vao->vertexattrib[ATT_COLOR].pointer, glstate->vao->vertexattrib[ATT_COLOR].stride, 4, 0, count);
else
glstate->vao->color.ptr = copy_gl_pointer_color(&glstate->vao->vertexattrib[ATT_COLOR], 4, 0, count);
list->color = glstate->vao->color.ptr + 4*skip;
} else {
if(glstate->vao->vertexattrib[ATT_COLOR].size==GL_BGRA)
list->color = copy_gl_pointer_color_bgra(glstate->vao->vertexattrib[ATT_COLOR].pointer, glstate->vao->vertexattrib[ATT_COLOR].stride, 4, skip, count);
else
list->color = copy_gl_pointer_color(&glstate->vao->vertexattrib[ATT_COLOR], 4, skip, count);
}
}
if (glstate->vao->vertexattrib[ATT_SECONDARY].enabled/* && glstate->enable.color_array*/) {
if(glstate->vao->shared_arrays) {
if(glstate->vao->vertexattrib[ATT_SECONDARY].size==GL_BGRA)
glstate->vao->secondary.ptr = copy_gl_pointer_color_bgra(glstate->vao->vertexattrib[ATT_SECONDARY].pointer, glstate->vao->vertexattrib[ATT_SECONDARY].stride, 4, 0, count);
else
glstate->vao->secondary.ptr = copy_gl_pointer(&glstate->vao->vertexattrib[ATT_SECONDARY], 4, 0, count); // alpha chanel is always 0 for secondary...
list->secondary = glstate->vao->secondary.ptr + 4*skip;
} else {
if(glstate->vao->vertexattrib[ATT_SECONDARY].size==GL_BGRA)
list->secondary = copy_gl_pointer_color_bgra(glstate->vao->vertexattrib[ATT_SECONDARY].pointer, glstate->vao->vertexattrib[ATT_SECONDARY].stride, 4, skip, count);
else
list->secondary = copy_gl_pointer(&glstate->vao->vertexattrib[ATT_SECONDARY], 4, skip, count); // alpha chanel is always 0 for secondary...
}
}
if (glstate->vao->vertexattrib[ATT_NORMAL].enabled) {
if(glstate->vao->shared_arrays) {
glstate->vao->normal.ptr = copy_gl_pointer_raw(&glstate->vao->vertexattrib[ATT_NORMAL], 3, 0, count);
list->normal = glstate->vao->normal.ptr + 3*skip;
} else
list->normal = copy_gl_pointer_raw(&glstate->vao->vertexattrib[ATT_NORMAL], 3, skip, count);
}
if (glstate->vao->vertexattrib[ATT_FOGCOORD].enabled) {
if(glstate->vao->shared_arrays) {
glstate->vao->fog.ptr = copy_gl_pointer_raw(&glstate->vao->vertexattrib[ATT_FOGCOORD], 1, 0, count);
list->fogcoord = glstate->vao->fog.ptr + 1*skip;
} else
list->fogcoord = copy_gl_pointer_raw(&glstate->vao->vertexattrib[ATT_FOGCOORD], 1, skip, count);
}
for (int i=0; i<glstate->vao->maxtex; i++) {
if (glstate->vao->vertexattrib[ATT_MULTITEXCOORD0+i].enabled) {
if(glstate->vao->shared_arrays) {
glstate->vao->tex[i].ptr = copy_gl_pointer_tex(&glstate->vao->vertexattrib[ATT_MULTITEXCOORD0+i], 4, 0, count);
list->tex[i] = glstate->vao->tex[i].ptr + 4*skip;
} else
list->tex[i] = copy_gl_pointer_tex(&glstate->vao->vertexattrib[ATT_MULTITEXCOORD0+i], 4, skip, count);
}
}
}
// Track the highest texture unit that actually has coordinates.
for (int i=0; i<hardext.maxtex; i++)
if(list->tex[i] && list->maxtex < i+1) list->maxtex = i+1;
return list;
}
// Append elements [skip, count) of the current client arrays (plus optional
// `indices` of length `ilen_b`) to an existing renderlist `a`, growing and
// un-sharing `a`'s storage as needed. Returns `a`.
static renderlist_t *arrays_add_renderlist(renderlist_t *a, GLenum mode,
GLsizei skip, GLsizei count, GLushort* indices, int ilen_b) {
DBG(LOGD("arrays_add_renderlist(%p, %s, %d, %d, %p, %d)\n", a, PrintEnum(mode), skip, count, indices, ilen_b);)
// check cache if any
if(glstate->vao->shared_arrays) {
if (!is_cache_compatible(count))
VaoSharedClear(glstate->vao);
}
// append all draw elements of b in a
// check the final indice size of a and b
int ilen_a = a->ilen;
int len_b = count-skip;
// lets append all the arrays
unsigned long cap = a->cap;
if (a->len + len_b >= cap) cap += len_b + DEFAULT_RENDER_LIST_CAPACITY;
// Unshare if shared (shared array are not used for now)
unshared_renderlist(a, cap);
redim_renderlist(a, cap);
unsharedindices_renderlist(a, ((ilen_a)?ilen_a:a->len) + ((ilen_b)?ilen_b:len_b));
// append arrays
// If the VAO cache is valid, append via memcpy from the cached flat
// arrays; otherwise convert/copy straight from the client pointers.
if(glstate->vao->shared_arrays) {
if (a->vert) memcpy(a->vert+a->len*4, glstate->vao->vert.ptr+skip*4, len_b*4*sizeof(GLfloat));
if (a->normal) memcpy(a->normal+a->len*3, glstate->vao->normal.ptr+skip*3, len_b*3*sizeof(GLfloat));
if (a->color) memcpy(a->color+a->len*4, glstate->vao->color.ptr+skip*4, len_b*4*sizeof(GLfloat));
if (a->secondary) memcpy(a->secondary+a->len*4, glstate->vao->secondary.ptr+skip*4, len_b*4*sizeof(GLfloat));
if (a->fogcoord) memcpy(a->fogcoord+a->len*1, glstate->vao->fog.ptr+skip*1, len_b*1*sizeof(GLfloat));
for (int i=0; i<a->maxtex; i++)
if (a->tex[i]) memcpy(a->tex[i]+a->len*4, glstate->vao->tex[i].ptr+skip*4, len_b*4*sizeof(GLfloat));
} else {
if (a->vert) copy_gl_pointer_tex_noalloc(a->vert+a->len*4, &glstate->vao->vertexattrib[ATT_VERTEX], 4, skip, count);
if (a->normal) copy_gl_pointer_raw_noalloc(a->normal+a->len*3, &glstate->vao->vertexattrib[ATT_NORMAL], 3, skip, count);
if (a->color) {
if(glstate->vao->vertexattrib[ATT_COLOR].size==GL_BGRA)
copy_gl_pointer_color_bgra_noalloc(a->color+a->len*4, glstate->vao->vertexattrib[ATT_COLOR].pointer, glstate->vao->vertexattrib[ATT_COLOR].stride, 4, skip, count);
else
copy_gl_pointer_color_noalloc(a->color+a->len*4, &glstate->vao->vertexattrib[ATT_COLOR], 4, skip, count);
}
if (a->secondary)
if(glstate->vao->vertexattrib[ATT_SECONDARY].size==GL_BGRA)
copy_gl_pointer_color_bgra_noalloc(a->secondary+a->len*4, glstate->vao->vertexattrib[ATT_SECONDARY].pointer, glstate->vao->vertexattrib[ATT_SECONDARY].stride, 4, skip, count);
else
copy_gl_pointer_noalloc(a->secondary+a->len*4, &glstate->vao->vertexattrib[ATT_SECONDARY], 4, skip, count); // alpha chanel is always 0 for secondary...
if (a->fogcoord) copy_gl_pointer_raw_noalloc(a->fogcoord+a->len*1, &glstate->vao->vertexattrib[ATT_FOGCOORD], 1, skip, count);
for (int i=0; i<a->maxtex; i++)
if (a->tex[i]) copy_gl_pointer_tex_noalloc(a->tex[i]+a->len*4, &glstate->vao->vertexattrib[ATT_MULTITEXCOORD0+i], 4, skip, count);
}
// indices
int old_ilenb = ilen_b;
if(!a->mode_inits) list_add_modeinit(a, a->mode_init);
// Indices are only needed if either part is indexed, either mode requires
// them, or the two parts mix GL_QUADS with another mode (quads get
// decomposed through indices).
if (ilen_a || ilen_b || mode_needindices(a->mode) || mode_needindices(mode)
|| (a->mode!=mode && (a->mode==GL_QUADS || mode==GL_QUADS)) )
{
// alloc or realloc a->indices first...
ilen_b = indices_getindicesize(mode, ((indices)? ilen_b:len_b));
prepareadd_renderlist(a, ilen_b);
// then append b
doadd_renderlist(a, mode, indices, indices?old_ilenb:len_b, ilen_b);
}
// lenghts
a->len += len_b;
if(a->mode_inits) list_add_modeinit(a, mode);
//all done
a->stage = STAGE_DRAW; // just in case
return a;
}
// Decide whether a draw call in `mode` must be routed through the
// renderlist emulation path instead of being passed to the GLES driver
// directly. True when the current state needs CPU-side fixups: texgen or
// missing/1-component texcoords on ES1.1, line polygon mode, secondary
// color or non-4-component colors on ES1.1, unsupported vertex types,
// stippled lines, or an active (non-pending) display-list compilation.
static inline bool should_intercept_render(GLenum mode) {
// check bounded tex that will be used if one need some transformations
if (hardext.esversion==1) // but only for ES1.1
for (int aa=0; aa<hardext.maxtex; aa++) {
if (glstate->enable.texture[aa]) {
// texgen must be emulated on ES1.1
if ((hardext.esversion==1) && ((glstate->enable.texgen_s[aa] || glstate->enable.texgen_t[aa] || glstate->enable.texgen_r[aa] || glstate->enable.texgen_q[aa])))
return true;
// a bound texture with no texcoord array needs synthesized coords
// (except point sprites with coord-replace).
if ((!glstate->vao->vertexattrib[ATT_MULTITEXCOORD0+aa].enabled) && !(mode==GL_POINT && glstate->texture.pscoordreplace[aa]))
return true;
// 1-component texcoords must be expanded
if ((glstate->vao->vertexattrib[ATT_MULTITEXCOORD0+aa].enabled) && (glstate->vao->vertexattrib[ATT_MULTITEXCOORD0+aa].size == 1))
return true;
}
}
// wireframe polygon mode is emulated
if(glstate->polygon_mode == GL_LINE && mode>=GL_TRIANGLES)
return true;
if ((hardext.esversion==1) && ((glstate->vao->vertexattrib[ATT_SECONDARY].enabled) && (glstate->vao->vertexattrib[ATT_COLOR].enabled)))
return true;
if ((hardext.esversion==1) && (glstate->vao->vertexattrib[ATT_COLOR].enabled && (glstate->vao->vertexattrib[ATT_COLOR].size != 4)))
return true;
//if (glstate->vao->vertex || glstate->vao->elements)
// return false; // don't try to intercept VAO
return (
(glstate->vao->vertexattrib[ATT_VERTEX].enabled && ! valid_vertex_type(glstate->vao->vertexattrib[ATT_VERTEX].type)) ||
(mode == GL_LINES && glstate->enable.line_stipple) ||
/*(mode == GL_QUADS) ||*/ (glstate->list.active && !glstate->list.pending)
);
}
// Number of vertices referenced by an index array: max(index) + 1.
// Exactly one of sindices (16-bit) / iindices (32-bit) is expected to be
// non-NULL; count is the number of indices to scan.
GLuint len_indices(const GLushort *sindices, const GLuint *iindices, GLsizei count) {
    GLuint max_idx = 0;
    if (sindices) {
        for (GLsizei k = 0; k < count; ++k) {
            const GLuint idx = sindices[k];
            if (idx > max_idx)
                max_idx = idx;
        }
    } else {
        for (GLsizei k = 0; k < count; ++k) {
            if (iindices[k] > max_idx)
                max_idx = iindices[k];
        }
    }
    return max_idx + 1; // length is max(indices) + 1
}
// Common backend for the glDrawArrays/glDrawElements family once display-list
// recording and renderlist interception have been ruled out. Remaps legacy
// primitives (GL_QUADS / GL_QUAD_STRIP / GL_POLYGON) onto GLES-supported ones,
// mirrors client-array state into a GLES1 backend when needed, handles
// GL_SELECT render mode, optionally promotes locked arrays to VBOs, then
// issues the actual GLES draw call.
//   mode          - primitive mode (desktop GL values accepted)
//   first         - first vertex for non-indexed draws (0 for indexed draws)
//   count         - number of vertices/indices to draw
//   len           - number of vertices referenced (max index + 1); 0 = not yet computed
//   sindices      - GLushort index array, or NULL
//   iindices      - GLuint index array, or NULL (both NULL => glDrawArrays path)
//   instancecount - number of instances (1 = plain, non-instanced draw)
static void glDrawElementsCommon(GLenum mode, GLint first, GLsizei count, GLuint len, const GLushort *sindices, const GLuint *iindices, int instancecount) {
    if (glstate->raster.bm_drawing)
        bitmap_flush();
    DBG(printf("glDrawElementsCommon(%s, %d, %d, %d, %p, %p, %d)\n", PrintEnum(mode), first, count, len, sindices, iindices, instancecount);)
    LOAD_GLES_FPE(glDrawElements);
    LOAD_GLES_FPE(glDrawArrays);
    LOAD_GLES_FPE(glNormalPointer);
    LOAD_GLES_FPE(glVertexPointer);
    LOAD_GLES_FPE(glColorPointer);
    LOAD_GLES_FPE(glTexCoordPointer);
    LOAD_GLES_FPE(glEnable);
    LOAD_GLES_FPE(glDisable);
    LOAD_GLES_FPE(glMultiTexCoord4f);
// client_state(attrib, gles_array_enum, pre_code): propagate the enable/disable
// state of one client array to the backend when it differs from the hardware
// mirror (or unconditionally on ES2+). 'C' runs first — used below to select
// the active client texture unit before touching texcoord arrays.
#define client_state(A, B, C) \
    if((glstate->vao->vertexattrib[A].enabled != glstate->gleshard->vertexattrib[A].enabled) || (hardext.esversion!=1)) { \
        C \
        if(glstate->vao->vertexattrib[A].enabled) \
            fpe_glEnableClientState(B); \
        else \
            fpe_glDisableClientState(B); \
    }
#if 0
    // FEZ draw the stars (intro menu and the ones visible by night)
    // by drawing a huge list of 500k+ triangles!
    // it's a bit too much for mobile hardware, so it can be simply disabled here
    if(count>500000) return;
#endif
    // keep the caller's primitive mode before the remapping below rewrites 'mode'
    // NOTE(review): mode_init appears unused within this function — confirm
    GLenum mode_init = mode;
    /*if (glstate->polygon_mode == GL_LINE && mode>=GL_TRIANGLES)
        mode = GL_LINE_LOOP;*/
    if (glstate->polygon_mode == GL_POINT && mode>=GL_TRIANGLES)
        mode = GL_POINTS;
    if (mode == GL_QUAD_STRIP)
        mode = GL_TRIANGLE_STRIP;
    if (mode == GL_POLYGON)
        mode = GL_TRIANGLE_FAN;
    // GL_QUADS: expand each quad into two triangles (6 indices per 4 input
    // indices) using the shared scratch buffer for the rewritten index list
    if (mode == GL_QUADS) {
        mode = GL_TRIANGLES;
        int ilen = (count*3)/2;
        if (iindices) {
            gl4es_scratch(ilen*sizeof(GLuint));
            GLuint *tmp = (GLuint*)glstate->scratch;
            for (int i=0, j=0; i+3<count; i+=4, j+=6) {
                tmp[j+0] = iindices[i+0];
                tmp[j+1] = iindices[i+1];
                tmp[j+2] = iindices[i+2];
                tmp[j+3] = iindices[i+0];
                tmp[j+4] = iindices[i+2];
                tmp[j+5] = iindices[i+3];
            }
            iindices = tmp;
        } else {
            gl4es_scratch(ilen*sizeof(GLushort));
            GLushort *tmp = (GLushort*)glstate->scratch;
            for (int i=0, j=0; i+3<count; i+=4, j+=6) {
                tmp[j+0] = sindices[i+0];
                tmp[j+1] = sindices[i+1];
                tmp[j+2] = sindices[i+2];
                tmp[j+3] = sindices[i+0];
                tmp[j+4] = sindices[i+2];
                tmp[j+5] = sindices[i+3];
            }
            sindices = tmp;
        }
        count = ilen;
    }
    // of course, GL_SELECT with shader will just not work if not using standard transformation method... Instance count is ignored also
    if (glstate->render_mode == GL_SELECT) {
        // TODO handling uint indices
        if(!sindices && !iindices)
            select_glDrawArrays(&glstate->vao->vertexattrib[ATT_VERTEX], mode, first, count);
        else
            select_glDrawElements(&glstate->vao->vertexattrib[ATT_VERTEX], mode, count, sindices?GL_UNSIGNED_SHORT:GL_UNSIGNED_INT, sindices?((void*)sindices):((void*)iindices));
    } else {
        GLuint old_tex = glstate->texture.client;
        realize_textures(1);
        // GLES1 backend: push the client array pointers/enables to the driver now
        if(hardext.esversion==1) {
#define TEXTURE(A) gl4es_glClientActiveTexture(A+GL_TEXTURE0);
            vertexattrib_t *p;
#define GetP(A) (&glstate->vao->vertexattrib[A])
            // secondary color and color sizef != 4 are "intercepted" and draw using a list, unless using ES>1.1
            client_state(ATT_COLOR, GL_COLOR_ARRAY, );
            p = GetP(ATT_COLOR);
            if (p->enabled)
                gles_glColorPointer(p->size, p->type, p->stride, p->pointer);
            client_state(ATT_NORMAL, GL_NORMAL_ARRAY, );
            p = GetP(ATT_NORMAL);
            if (p->enabled)
                gles_glNormalPointer(p->type, p->stride, p->pointer);
            client_state(ATT_VERTEX, GL_VERTEX_ARRAY, );
            p = GetP(ATT_VERTEX);
            if (p->enabled)
                gles_glVertexPointer(p->size, p->type, p->stride, p->pointer);
            for (int aa=0; aa<hardext.maxtex; aa++) {
                client_state(ATT_MULTITEXCOORD0+aa, GL_TEXTURE_COORD_ARRAY, TEXTURE(aa););
                p = GetP(ATT_MULTITEXCOORD0+aa);
                // get 1st enabled target
                const GLint itarget = get_target(glstate->enable.texture[aa]);
                if (itarget>=0) {
                    // non-2D targets are emulated by force-enabling GL_TEXTURE_2D
                    // (disabled again after the draw, below)
                    if (!IS_TEX2D(glstate->enable.texture[aa]) && (IS_ANYTEX(glstate->enable.texture[aa]))) {
                        gl4es_glActiveTexture(GL_TEXTURE0+aa);
                        realize_active();
                        gles_glEnable(GL_TEXTURE_2D);
                    }
                    if (p->enabled) {
                        TEXTURE(aa);
                        int changes = tex_setup_needchange(itarget);
                        // len (max index + 1) is computed lazily, only when a
                        // texcoord conversion actually needs it
                        if(changes && !len) len = len_indices(sindices, iindices, count);
                        tex_setup_texcoord(len, changes, itarget, p);
                    } else
                        gles_glMultiTexCoord4f(GL_TEXTURE0+aa, glstate->texcoord[aa][0], glstate->texcoord[aa][1], glstate->texcoord[aa][2], glstate->texcoord[aa][3]);
                }
            }
#undef GetP
            if (glstate->texture.client!=old_tex)
                TEXTURE(old_tex);
        }
        // check if arrays are locked and can be put in a VBO
        if(hardext.esversion>1 && globals4es.usevbo==2 && glstate->vao->locked==1) {
            // can now browse all enabled VA, and put the corresponding data in a VBO
            // warning, with the use of first
            // Checking only Vertex Attrib for now!
            // TODO: check all va, and take care of interleaved ones...
            ToBuffer(glstate->vao->first, glstate->vao->count);
        }
        if(hardext.esversion>1 && globals4es.usevbo==3 && (glstate->vao->locked==1 || glstate->vao->locked==2)) {
            if(glstate->vao->locked==1)
                glstate->vao->locked++;
            else
                ToBuffer(glstate->vao->first, glstate->vao->count);
        }
        // POLYGON mode as LINE is "intercepted" and drawn using list
        // Issue the actual draw; instancing goes through the fpe helpers
        // (GLES1 has no instancing, so instancecount is ignored there)
        if(instancecount==1 || hardext.esversion==1) {
            if(!iindices && !sindices)
                gles_glDrawArrays(mode, first, count);
            else
                gles_glDrawElements(mode, count, (sindices)?GL_UNSIGNED_SHORT:GL_UNSIGNED_INT, (sindices?((void*)sindices):((void*)iindices)));
        } else {
            if(!iindices && !sindices)
                fpe_glDrawArraysInstanced(mode, first, count,instancecount);
            else {
                void* tmp=(sindices?((void*)sindices):((void*)iindices));
                GLenum t = (sindices)?GL_UNSIGNED_SHORT:GL_UNSIGNED_INT;
                fpe_glDrawElementsInstanced(mode, count, t, tmp, instancecount);
            }
        }
        // undo the GL_TEXTURE_2D force-enable done for non-2D targets above
        for (int aa=0; aa<hardext.maxtex; aa++) {
            if (!IS_TEX2D(glstate->enable.texture[aa]) && (IS_ANYTEX(glstate->enable.texture[aa]))) {
                gl4es_glActiveTexture(GL_TEXTURE0+aa);
                realize_active();
                gles_glDisable(GL_TEXTURE_2D);
            }
        }
        if (glstate->texture.client!=old_tex)
            TEXTURE(old_tex);
#undef TEXTURE
    }
}
// Batching thresholds: a non-intercepted draw whose vertex count falls within
// [MIN_BATCH, MAX_BATCH] is recorded into a pending renderlist instead of
// being issued immediately (values come from runtime configuration).
#define MIN_BATCH globals4es.minbatch
#define MAX_BATCH globals4es.maxbatch
// glDrawRangeElements: indexed draw where all indices lie in [start, end].
// Depending on state, the draw is recorded into the active/pending renderlist
// (compiling), rendered through an immediate renderlist (intercept), or
// forwarded to glDrawElementsCommon.
void APIENTRY_GL4ES gl4es_glDrawRangeElements(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices) {
    DBG(printf("glDrawRangeElements(%s, %i, %i, %i, %s, @%p), inlist=%i, pending=%d\n", PrintEnum(mode), start, end, count, PrintEnum(type), indices, (glstate->list.active)?1:0, glstate->list.pending);)
    count = adjust_vertices(mode, count);
    if (count<0) {
        errorShim(GL_INVALID_VALUE);
        return;
    }
    if (count==0) {
        noerrorShim();
        return;
    }
    bool compiling = (glstate->list.active);
    bool intercept = should_intercept_render(mode);
    //BATCH Mode
    if(!compiling) {
        if((!intercept && !glstate->list.pending && (count>=MIN_BATCH && count<=MAX_BATCH))
            || (intercept && globals4es.maxbatch)) {
            compiling = true;
            glstate->list.pending = 1;
            glstate->list.active = alloc_renderlist();
        }
    }
    noerrorShim();
    GLushort *sindices = NULL;
    GLuint *iindices = NULL;
    // a converted copy is needed unless the indices are already GLushort, or
    // are GLuint that the hardware can consume directly on the direct path
    bool need_free = !(
        (type==GL_UNSIGNED_SHORT) ||
        (!compiling && !intercept && type==GL_UNSIGNED_INT && hardext.elementuint)
    );
    if(need_free) {
        // indices may live in a bound element buffer: resolve to a real pointer
        sindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices):indices,
            type, 1, 0, GL_UNSIGNED_SHORT, 1, 0, count, NULL);
    } else {
        if(type==GL_UNSIGNED_INT)
            iindices = (glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices)):(GLvoid*)indices;
        else
            sindices = (glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices)):(GLvoid*)indices;
    }
    if (compiling) {
        // TODO, handle uint indices
        renderlist_t *list = glstate->list.active;
        // the renderlist takes ownership of sindices, so make a private copy
        // when the caller's buffer was used as-is
        if(!need_free) {
            GLushort *tmp = sindices;
            sindices = (GLushort*)malloc(count*sizeof(GLushort));
            memcpy(sindices, tmp, count*sizeof(GLushort));
        }
        // rebase indices to the list's vertex window [start, end]
        for (int i=0; i<count; i++) sindices[i]-=start; //TODO: should be optimizable
        if(globals4es.mergelist && list->stage>=STAGE_DRAW && is_list_compatible(list) && !list->use_glstate && sindices) {
            list = NewDrawStage(list, mode);
            if(list->vert) {
                glstate->list.active = arrays_add_renderlist(list, mode, start, end + 1, sindices, count);
                NewStage(glstate->list.active, STAGE_POSTDRAW);
                return;
            }
        }
        NewStage(list, STAGE_DRAW);
        glstate->list.active = arrays_to_renderlist(list, mode, start, end + 1);
        list->indices = sindices;
        list->ilen = count;
        list->indice_cap = count;
        //end_renderlist(list);
        NewStage(glstate->list.active, STAGE_POSTDRAW);
        return;
    }
    if (intercept) {
        //TODO handling uint indices
        renderlist_t *list = NULL;
        if(!need_free) {
            GLushort *tmp = sindices;
            sindices = (GLushort*)malloc(count*sizeof(GLushort));
            memcpy(sindices, tmp, count*sizeof(GLushort));
        }
        for (int i=0; i<count; i++) sindices[i]-=start;
        // build, draw and free a one-shot renderlist
        list = arrays_to_renderlist(list, mode, start, end + 1);
        list->indices = sindices;
        list->ilen = count;
        list->indice_cap = count;
        list = end_renderlist(list);
        draw_renderlist(list);
        free_renderlist(list);
        return;
    } else {
        // direct draw: end+1 is the vertex count (max index + 1)
        glDrawElementsCommon(mode, 0, count, end+1, sindices, iindices, 1);
        if(need_free)
            free(sindices);
    }
}
AliasExport(void,glDrawRangeElements,,(GLenum mode,GLuint start,GLuint end,GLsizei count,GLenum type,const void *indices));
AliasExport(void,glDrawRangeElements,EXT,(GLenum mode,GLuint start,GLuint end,GLsizei count,GLenum type,const void *indices));
// glDrawElements: indexed draw with unknown index range. Indices are
// normalized (min/max computed) when the draw is recorded into a renderlist;
// otherwise the draw is forwarded to glDrawElementsCommon with len=0 so the
// vertex count is computed lazily only if needed.
void APIENTRY_GL4ES gl4es_glDrawElements(GLenum mode, GLsizei count, GLenum type, const GLvoid *indices) {
    DBG(printf("glDrawElements(%s, %d, %s, %p), vtx=%p map=%p, pending=%d\n", PrintEnum(mode), count, PrintEnum(type), indices, (glstate->vao->vertex)?glstate->vao->vertex->data:NULL, (glstate->vao->elements)?glstate->vao->elements->data:NULL, glstate->list.pending);)
    // TODO: split for count > 65535?
    // special check for QUADS and TRIANGLES that need multiple of 4 or 3 vertex...
    count = adjust_vertices(mode, count);
    if (count<0) {
        errorShim(GL_INVALID_VALUE);
        return;
    }
    if (count==0) {
        noerrorShim();
        return;
    }
    bool compiling = (glstate->list.active);
    bool intercept = should_intercept_render(mode);
    //BATCH Mode
    if(!compiling) {
        if((!intercept && !glstate->list.pending && (count>=MIN_BATCH && count<=MAX_BATCH))
            || (intercept && globals4es.maxbatch)) {
            compiling = true;
            glstate->list.pending = 1;
            glstate->list.active = alloc_renderlist();
        }
    }
    noerrorShim();
    GLushort *sindices = NULL;
    GLuint *iindices = NULL;
    GLuint old_index = 0;
    // a converted copy is needed unless indices are already GLushort, or are
    // GLuint supported directly by the hardware on the direct path
    bool need_free = !(
        (type==GL_UNSIGNED_SHORT) ||
        (!compiling && !intercept && type==GL_UNSIGNED_INT && hardext.elementuint)
    );
    if(need_free) {
        sindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices):indices,
            type, 1, 0, GL_UNSIGNED_SHORT, 1, 0, count, NULL);
        // NOTE(review): old_index is only restored on the direct-draw path
        // below; the compiling/intercept paths return without restoring it —
        // confirm wantBufferIndex semantics
        old_index = wantBufferIndex(0);
    } else {
        if(type==GL_UNSIGNED_INT)
            iindices = (glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices)):(GLvoid*)indices;
        else
            sindices = (glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices)):(GLvoid*)indices;
    }
    if (compiling) {
        // TODO, handle uint indices
        renderlist_t *list = glstate->list.active;
        GLsizei min, max;
        // the renderlist takes ownership of sindices: copy if not already ours
        if(!need_free) {
            GLushort *tmp = sindices;
            sindices = (GLushort*)malloc(count*sizeof(GLushort));
            memcpy(sindices, tmp, count*sizeof(GLushort));
        }
        // rebase indices to 0 and get the referenced vertex range
        normalize_indices_us(sindices, &max, &min, count);
        if(globals4es.mergelist && list->stage>=STAGE_DRAW && is_list_compatible(list) && !list->use_glstate && sindices) {
            list = NewDrawStage(list, mode);
            glstate->list.active = arrays_add_renderlist(list, mode, min, max + 1, sindices, count);
            NewStage(glstate->list.active, STAGE_POSTDRAW);
            return;
        }
        NewStage(list, STAGE_DRAW);
        glstate->list.active = list = arrays_to_renderlist(list, mode, min, max + 1);
        list->indices = sindices;
        list->ilen = count;
        list->indice_cap = count;
        //end_renderlist(list);
        NewStage(glstate->list.active, STAGE_POSTDRAW);
        return;
    }
    if (intercept) {
        //TODO handling uint indices
        renderlist_t *list = NULL;
        GLsizei min, max;
        if(!need_free) {
            GLushort *tmp = sindices;
            sindices = (GLushort*)malloc(count*sizeof(GLushort));
            memcpy(sindices, tmp, count*sizeof(GLushort));
        }
        normalize_indices_us(sindices, &max, &min, count);
        // build, draw and free a one-shot renderlist
        list = arrays_to_renderlist(list, mode, min, max + 1);
        list->indices = sindices;
        list->ilen = count;
        list->indice_cap = count;
        list = end_renderlist(list);
        draw_renderlist(list);
        free_renderlist(list);
        return;
    } else {
        // len=0: vertex count computed lazily inside glDrawElementsCommon
        glDrawElementsCommon(mode, 0, count, 0, sindices, iindices, 1);
        if(need_free) {
            free(sindices);
            wantBufferIndex(old_index);
        }
    }
}
AliasExport(void,glDrawElements,,(GLenum mode, GLsizei count, GLenum type, const GLvoid *indices));
// glDrawArrays: record the draw into an active/pending renderlist when
// compiling or batching, render through an immediate renderlist when
// intercepted, otherwise forward to glDrawElementsCommon. GL_QUADS is
// emulated with a cached quad->triangle GLushort index buffer.
//   mode  - primitive mode (desktop GL values accepted)
//   first - index of the first vertex in the enabled arrays
//   count - number of vertices to draw
void APIENTRY_GL4ES gl4es_glDrawArrays(GLenum mode, GLint first, GLsizei count) {
    DBG(printf("glDrawArrays(%s, %d, %d), list=%p pending=%d\n", PrintEnum(mode), first, count, glstate->list.active, glstate->list.pending);)
    // special check for QUADS and TRIANGLES that need multiple of 4 or 3 vertex...
    count = adjust_vertices(mode, count);
    if (count<0) {
        errorShim(GL_INVALID_VALUE);
        return;
    }
    if (count==0) {
        noerrorShim();
        return;
    }
    // special case for (very) large GL_QUADS array
    if ((mode==GL_QUADS) && (count>4*8000)) {
        // split the array in manageable slice
        int cnt = 4*8000;
        for (int i=0; i<count; i+=4*8000) {
            if (i+cnt>count) cnt = count-i;
            // each slice starts at first+i: recursing with 'i' alone would
            // ignore the caller's 'first' offset and draw the wrong vertices
            gl4es_glDrawArrays(mode, first+i, cnt);
        }
        return;
    }
    noerrorShim();
    bool intercept = should_intercept_render(mode);
    //BATCH Mode
    if (!glstate->list.compiling) {
        if((!intercept && !glstate->list.pending && (count>=MIN_BATCH && count<=MAX_BATCH))
            || (intercept && globals4es.maxbatch)) {
            glstate->list.pending = 1;
            glstate->list.active = alloc_renderlist();
        }
    }
    if (glstate->list.active) {
        // record (or batch) into the active renderlist
        renderlist_t *list = glstate->list.active;
        if(globals4es.mergelist && list->stage>=STAGE_DRAW && is_list_compatible(list) && !list->use_glstate) {
            list = NewDrawStage(list, mode);
            if(list->vert) {
                glstate->list.active = arrays_add_renderlist(list, mode, first, count+first, NULL, 0);
                NewStage(glstate->list.active, STAGE_POSTDRAW);
                return;
            }
        }
        NewStage(list, STAGE_DRAW);
        glstate->list.active = arrays_to_renderlist(list, mode, first, count+first);
        NewStage(glstate->list.active, STAGE_POSTDRAW);
        return;
    }
    /*if (glstate->polygon_mode == GL_LINE && mode>=GL_TRIANGLES)
        mode = GL_LINE_LOOP;*/
    if (glstate->polygon_mode == GL_POINT && mode>=GL_TRIANGLES)
        mode = GL_POINTS;
    if (intercept) {
        // draw through a one-shot renderlist (features GLES can't do directly)
        renderlist_t *list;
        list = arrays_to_renderlist(NULL, mode, first, count+first);
        list = end_renderlist(list);
        draw_renderlist(list);
        free_renderlist(list);
    } else {
        if (mode==GL_QUADS) {
            // TODO: move those static in glstate
            static GLushort *indices = NULL;
            static int indcnt = 0;
            static int indfirst = 0;
            int realfirst = ((first%4)==0)?0:first;
            int realcount = count + (first-realfirst);
            // (re)build the cached quad->triangle index list when it is too
            // small or when the base vertex changed
            if((indcnt < realcount) || (indfirst!=realfirst)) {
                if(indcnt < realcount) {
                    indcnt = realcount;
                    if (indices) free(indices);
                    indices = (GLushort*)malloc(sizeof(GLushort)*(indcnt*3/2));
                }
                indfirst = realfirst;
                GLushort *p = indices;
                for (int i=0, j=indfirst; i+3<indcnt; i+=4, j+=4) {
                    *(p++) = j + 0;
                    *(p++) = j + 1;
                    *(p++) = j + 2;
                    *(p++) = j + 0;
                    *(p++) = j + 2;
                    *(p++) = j + 3;
                }
            }
            GLuint old_buffer = wantBufferIndex(0);
            glDrawElementsCommon(GL_TRIANGLES, 0, count*3/2, count, indices+(first-indfirst)*3/2, NULL, 1);
            wantBufferIndex(old_buffer);
            return;
        }
        glDrawElementsCommon(mode, first, count, count, NULL, NULL, 1);
    }
}
AliasExport(void,glDrawArrays,,(GLenum mode, GLint first, GLsizei count));
AliasExport(void,glDrawArrays,EXT,(GLenum mode, GLint first, GLsizei count));
// glMultiDrawArrays: issue primcount array draws in one call; primitive i
// draws counts[i] vertices starting at firsts[i]. Intercepted primitives are
// accumulated into a single renderlist and drawn once at the end.
void APIENTRY_GL4ES gl4es_glMultiDrawArrays(GLenum mode, const GLint *firsts, const GLsizei *counts, GLsizei primcount)
{
    DBG(printf("glMultiDrawArrays(%s, %p, %p, %d), list=%p pending=%d\n", PrintEnum(mode), firsts, counts, primcount, glstate->list.active, glstate->list.pending);)
    if(!primcount) {
        noerrorShim();
        return;
    }
    bool compiling = (glstate->list.active);
    bool intercept = should_intercept_render(mode);
    // extrema of the per-primitive counts, used for the batching heuristic
    GLsizei maxcount=counts[0];
    GLsizei mincount=counts[0];
    for (int i=1; i<primcount; i++) {
        if(counts[i]>maxcount) maxcount=counts[i];
        if(counts[i]<mincount) mincount=counts[i];
    }
    //BATCH Mode
    if(!compiling) {
        if(!intercept && glstate->list.pending && maxcount>MAX_BATCH) // too large and will not intercept, stop the BATCH
            gl4es_flush();
        else if((!intercept && !glstate->list.pending && mincount<MIN_BATCH)
            || (intercept && globals4es.maxbatch)) {
            compiling = true;
            glstate->list.pending = 1;
            glstate->list.active = alloc_renderlist();
        }
    }
    renderlist_t *list = NULL;
    GLenum err = 0;
    for (int i=0; i<primcount; i++) {
        GLsizei count = adjust_vertices(mode, counts[i]);
        GLint first = firsts[i];
        if (count<0) {
            err = GL_INVALID_VALUE;
            continue;
        }
        if (count==0) {
            continue;
        }
        if (compiling) {
            // record into the active renderlist (merging when possible)
            if(globals4es.mergelist && glstate->list.active->stage>=STAGE_DRAW && is_list_compatible(glstate->list.active) && !glstate->list.active->use_glstate) {
                glstate->list.active = NewDrawStage(glstate->list.active, mode);
                glstate->list.active = arrays_add_renderlist(glstate->list.active, mode, first, count+first, NULL, 0);
                NewStage(glstate->list.active, STAGE_POSTDRAW);
                continue;
            }
            NewStage(glstate->list.active, STAGE_DRAW);
            glstate->list.active = arrays_to_renderlist(glstate->list.active, mode, first, count+first);
            NewStage(glstate->list.active, STAGE_POSTDRAW);
            continue;
        }
        if (glstate->polygon_mode == GL_POINT && mode>=GL_TRIANGLES)
            mode = GL_POINTS;
        if (intercept) {
            if(list) {
                NewStage(list, STAGE_DRAW);
            }
            // 'list' is NULL for the first intercepted primitive: the merge
            // test must guard it, else list->stage is a NULL dereference
            if(globals4es.mergelist && list && list->stage>=STAGE_DRAW && is_list_compatible(list) && !list->use_glstate) {
                list = NewDrawStage(list, mode);
                list = arrays_add_renderlist(list, mode, first, count+first, NULL, 0);
                NewStage(list, STAGE_POSTDRAW);
            }
            else
                // chain onto the list built so far so all primitives are kept
                // and drawn by the single draw_renderlist() below
                list = arrays_to_renderlist(list, mode, first, count+first);
        } else {
            if (mode==GL_QUADS) {
                // TODO: move those static in glstate
                static GLushort *indices = NULL;
                static int indcnt = 0;
                static int indfirst = 0;
                int realfirst = ((first%4)==0)?0:first;
                int realcount = count + (first-realfirst);
                // (re)build the cached quad->triangle index list when needed
                if((indcnt < realcount) || (indfirst!=realfirst)) {
                    if(indcnt < realcount) {
                        indcnt = realcount;
                        if (indices) free(indices);
                        indices = (GLushort*)malloc(sizeof(GLushort)*(indcnt*3/2));
                    }
                    indfirst = realfirst;
                    GLushort *p = indices;
                    for (int i=0, j=indfirst; i+3<indcnt; i+=4, j+=4) {
                        *(p++) = j + 0;
                        *(p++) = j + 1;
                        *(p++) = j + 2;
                        *(p++) = j + 0;
                        *(p++) = j + 2;
                        *(p++) = j + 3;
                    }
                }
                GLuint old_index = wantBufferIndex(0);
                glDrawElementsCommon(GL_TRIANGLES, 0, count*3/2, count, indices+(first-indfirst)*3/2, NULL, 1);
                wantBufferIndex(old_index);
                continue;
            }
            glDrawElementsCommon(mode, first, count, count, NULL, NULL, 1);
        }
    }
    if(list) {
        list = end_renderlist(list);
        draw_renderlist(list);
        free_renderlist(list);
    }
    if(err)
        errorShim(err);
    else
        errorGL();
}
AliasExport(void,glMultiDrawArrays,,(GLenum mode, const GLint *first, const GLsizei *count, GLsizei primcount));
// glMultiDrawElements: issue primcount indexed draws in one call; primitive i
// draws counts[i] indices read from indices[i]. Intercepted primitives are
// accumulated into a single renderlist drawn once at the end; otherwise the
// draw is recorded into the active list or forwarded to glDrawElementsCommon.
void APIENTRY_GL4ES gl4es_glMultiDrawElements( GLenum mode, GLsizei *counts, GLenum type, const void * const *indices, GLsizei primcount)
{
    DBG(printf("glMultiDrawElements(%s, %p, %s, %p, %d), list=%p pending=%d\n", PrintEnum(mode), counts, PrintEnum(type), indices, primcount, glstate->list.active, glstate->list.pending);)
    if(!primcount) {
        noerrorShim();
        return;
    }
    bool compiling = (glstate->list.active);
    bool intercept = should_intercept_render(mode);
    // extrema of the per-primitive counts, used for the batching heuristic
    GLsizei maxcount=counts[0];
    GLsizei mincount=counts[0];
    for (int i=1; i<primcount; i++) {
        if(counts[i]>maxcount) maxcount=counts[i];
        if(counts[i]<mincount) mincount=counts[i];
    }
    //BATCH Mode
    if(!compiling) {
        if(!intercept && glstate->list.pending && maxcount>MAX_BATCH) // too large and will not intercept, stop the BATCH
            gl4es_flush();
        else if((!intercept && !glstate->list.pending && mincount<MIN_BATCH)
            || (intercept && globals4es.maxbatch)) {
            compiling = true;
            glstate->list.pending = 1;
            glstate->list.active = alloc_renderlist();
        }
    }
    renderlist_t *list = NULL;
    for (int i=0; i<primcount; i++) {
        GLsizei count = adjust_vertices(mode, counts[i]);
        if (count<0) {
            errorShim(GL_INVALID_VALUE);
            continue;
        }
        if (count==0) {
            noerrorShim();
            continue;
        }
        noerrorShim();
        GLushort *sindices = NULL;
        GLuint *iindices = NULL;
        GLuint old_index = 0;
        // a converted copy is needed unless indices are already GLushort, or
        // are GLuint supported directly by the hardware on the direct path
        bool need_free = !(
            (type==GL_UNSIGNED_SHORT) ||
            (!compiling && !intercept && type==GL_UNSIGNED_INT && hardext.elementuint)
        );
        // each primitive has its own index pointer: indices[i], not 'indices'
        // (which is the pointer to the array of pointers)
        if(need_free) {
            sindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices[i]):indices[i],
                type, 1, 0, GL_UNSIGNED_SHORT, 1, 0, count, NULL);
            old_index = wantBufferIndex(0);
        } else {
            if(type==GL_UNSIGNED_INT)
                iindices = (glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices[i])):(GLvoid*)indices[i];
            else
                sindices = (glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices[i])):(GLvoid*)indices[i];
        }
        if (compiling) {
            // TODO, handle uint indices
            renderlist_t *list = NULL;
            GLsizei min, max;
            NewStage(glstate->list.active, STAGE_DRAW);
            list = glstate->list.active;
            // the renderlist takes ownership of sindices: copy if needed
            if(!need_free) {
                GLushort *tmp = sindices;
                sindices = (GLushort*)malloc(count*sizeof(GLushort));
                memcpy(sindices, tmp, count*sizeof(GLushort));
            }
            normalize_indices_us(sindices, &max, &min, count);
            list = arrays_to_renderlist(list, mode, min, max + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            //end_renderlist(list);
            if(glstate->list.pending) {
                NewStage(glstate->list.active, STAGE_POSTDRAW);
            } else {
                glstate->list.active = extend_renderlist(list);
            }
            continue;
        }
        if (intercept) {
            //TODO handling uint indices
            GLsizei min, max;
            if(!need_free) {
                GLushort *tmp = sindices;
                sindices = (GLushort*)malloc(count*sizeof(GLushort));
                memcpy(sindices, tmp, count*sizeof(GLushort));
            }
            normalize_indices_us(sindices, &max, &min, count);
            if(list) {
                NewStage(list, STAGE_DRAW);
            }
            // accumulate into the function-level 'list' (re-declaring a local
            // here would shadow it, leaking sublists and skipping the final draw)
            list = arrays_to_renderlist(list, mode, min, max + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            continue;
        } else {
            glDrawElementsCommon(mode, 0, count, 0, sindices, iindices, 1);
            if(need_free) {
                free(sindices);
                wantBufferIndex(old_index);
            }
        }
    }
    if(list) {
        list = end_renderlist(list);
        draw_renderlist(list);
        free_renderlist(list);
    }
}
AliasExport(void,glMultiDrawElements,,( GLenum mode, GLsizei *count, GLenum type, const void * const *indices, GLsizei primcount));
// glMultiDrawElementsBaseVertex: issue primcount indexed draws; primitive i
// draws counts[i] indices from indices[i], each index offset by basevertex[i].
// Indices are always copied (copy_gl_array) so the basevertex offset can be
// applied in place before drawing or list recording.
void APIENTRY_GL4ES gl4es_glMultiDrawElementsBaseVertex( GLenum mode, GLsizei *counts, GLenum type, const void * const *indices, GLsizei primcount, const GLint * basevertex) {
    DBG(printf("glMultiDrawElementsBaseVertex(%s, %p, %s, @%p, %d, @%p), inlist=%i, pending=%d\n", PrintEnum(mode), counts, PrintEnum(type), indices, primcount, basevertex, (glstate->list.active)?1:0, glstate->list.pending);)
    // divide the call, should try something better one day...
    bool compiling = (glstate->list.active);
    bool intercept = should_intercept_render(mode);
    //BATCH Mode
    // extrema of the per-primitive counts, used for the batching heuristic
    GLsizei maxcount=counts[0];
    GLsizei mincount=counts[0];
    for (int i=1; i<primcount; i++) {
        if(counts[i]>maxcount) maxcount=counts[i];
        if(counts[i]<mincount) mincount=counts[i];
    }
    if(!compiling) {
        if(!intercept && glstate->list.pending && maxcount>MAX_BATCH) // too large and will not intercept, stop the BATCH
            gl4es_flush();
        else if((!intercept && !glstate->list.pending && mincount<MIN_BATCH)
            || (intercept && globals4es.maxbatch)) {
            compiling = true;
            glstate->list.pending = 1;
            glstate->list.active = alloc_renderlist();
        }
    }
    renderlist_t *list = NULL;
    for (int i=0; i<primcount; i++) {
        GLsizei count = adjust_vertices(mode, counts[i]);
        if (count<0) {
            errorShim(GL_INVALID_VALUE);
            continue;
        }
        if (count==0) {
            noerrorShim();
            continue;
        }
        noerrorShim();
        GLushort *sindices = NULL;
        GLuint *iindices = NULL;
        // always copy this primitive's indices (indices[i], not 'indices'
        // itself) so they can be rebased in place below
        if(type==GL_UNSIGNED_INT && hardext.elementuint && !compiling && !intercept)
            iindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices[i]):indices[i],
                type, 1, 0, GL_UNSIGNED_INT, 1, 0, count, NULL);
        else
            sindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices[i]):indices[i],
                type, 1, 0, GL_UNSIGNED_SHORT, 1, 0, count, NULL);
        if (compiling) {
            // TODO, handle uint indices
            renderlist_t *list = NULL;
            GLsizei min, max;
            NewStage(glstate->list.active, STAGE_DRAW);
            list = glstate->list.active;
            normalize_indices_us(sindices, &max, &min, count);
            // fold the basevertex offset into the renderlist vertex range
            list = arrays_to_renderlist(list, mode, min + basevertex[i], max + basevertex[i] + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            //end_renderlist(list);
            if(glstate->list.pending) {
                NewStage(glstate->list.active, STAGE_POSTDRAW);
            } else {
                glstate->list.active = extend_renderlist(list);
            }
            continue;
        }
        if (intercept) {
            //TODO handling uint indices
            GLsizei min, max;
            normalize_indices_us(sindices, &max, &min, count);
            if(list) {
                NewStage(list, STAGE_DRAW);
            }
            list = arrays_to_renderlist(list, mode, min + basevertex[i], max + basevertex[i] + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            continue;
        } else {
            // shift this primitive's indices by its basevertex; basevertex is
            // indexed by primitive (i) — the element loop must use its own
            // variable, not shadow 'i' (which would also read past primcount)
            if(iindices)
                for(int j=0; j<count; j++) iindices[j]+=basevertex[i];
            else
                for(int j=0; j<count; j++) sindices[j]+=basevertex[i];
            GLuint old_index = wantBufferIndex(0);
            glDrawElementsCommon(mode, 0, count, 0, sindices, iindices, 1);
            if(iindices)
                free(iindices);
            else
                free(sindices);
            wantBufferIndex(old_index);
        }
    }
    if(list) {
        list = end_renderlist(list);
        draw_renderlist(list);
        free_renderlist(list);
    }
}
AliasExport(void,glMultiDrawElementsBaseVertex,,( GLenum mode, GLsizei *count, GLenum type, const void * const *indices, GLsizei primcount, const GLint * basevertex));
AliasExport(void,glMultiDrawElementsBaseVertex,ARB,( GLenum mode, GLsizei *count, GLenum type, const void * const *indices, GLsizei primcount, const GLint * basevertex));
// glDrawElementsBaseVertex: like glDrawElements but every index is offset by
// 'basevertex'. basevertex==0 forwards straight to gl4es_glDrawElements;
// otherwise the indices are always copied (copy_gl_array) so the offset can
// be applied in place before drawing or list recording.
void APIENTRY_GL4ES gl4es_glDrawElementsBaseVertex(GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex) {
    DBG(printf("glDrawElementsBaseVertex(%s, %d, %s, %p, %d), vtx=%p map=%p, pending=%d\n", PrintEnum(mode), count, PrintEnum(type), indices, basevertex, (glstate->vao->vertex)?glstate->vao->vertex->data:NULL, (glstate->vao->elements)?glstate->vao->elements->data:NULL, glstate->list.pending);)
    if(basevertex==0)
        gl4es_glDrawElements(mode, count, type, indices);
    else {
        count = adjust_vertices(mode, count);
        if (count<0) {
            errorShim(GL_INVALID_VALUE);
            return;
        }
        if (count==0) {
            noerrorShim();
            return;
        }
        bool compiling = (glstate->list.active);
        bool intercept = should_intercept_render(mode);
        //BATCH Mode
        if(!compiling) {
            if(!intercept && glstate->list.pending && count>MAX_BATCH) // too large and will not intercept, stop the BATCH
                gl4es_flush();
            else if((!intercept && !glstate->list.pending && count<MIN_BATCH)
                || (intercept && globals4es.maxbatch)) {
                compiling = true;
                glstate->list.pending = 1;
                glstate->list.active = alloc_renderlist();
            }
        }
        noerrorShim();
        GLushort *sindices = NULL;
        GLuint *iindices = NULL;
        // GLuint indices only on the direct-draw path with hardware support;
        // otherwise convert to GLushort (compiling/intercept assume sindices)
        if(type==GL_UNSIGNED_INT && hardext.elementuint && !compiling && !intercept)
            iindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices):indices,
                type, 1, 0, GL_UNSIGNED_INT, 1, 0, count, NULL);
        else
            sindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices):indices,
                type, 1, 0, GL_UNSIGNED_SHORT, 1, 0, count, NULL);
        if (compiling) {
            // TODO, handle uint indices
            renderlist_t *list = NULL;
            GLsizei min, max;
            NewStage(glstate->list.active, STAGE_DRAW);
            list = glstate->list.active;
            normalize_indices_us(sindices, &max, &min, count);
            // fold the basevertex offset into the renderlist vertex range
            list = arrays_to_renderlist(list, mode, min + basevertex, max + basevertex + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            //end_renderlist(list);
            if(glstate->list.pending) {
                NewStage(glstate->list.active, STAGE_POSTDRAW);
            } else {
                glstate->list.active = extend_renderlist(list);
            }
            return;
        }
        if (intercept) {
            //TODO handling uint indices
            renderlist_t *list = NULL;
            GLsizei min, max;
            normalize_indices_us(sindices, &max, &min, count);
            // build, draw and free a one-shot renderlist
            list = arrays_to_renderlist(list, mode, min + basevertex, max + basevertex + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            list = end_renderlist(list);
            draw_renderlist(list);
            free_renderlist(list);
            return;
        } else {
            // shift the (copied) indices by basevertex, then draw directly
            if(iindices)
                for(int i=0; i<count; i++) iindices[i]+=basevertex;
            else
                for(int i=0; i<count; i++) sindices[i]+=basevertex;
            glDrawElementsCommon(mode, 0, count, 0, sindices, iindices, 1);
            if(iindices)
                free(iindices);
            else
                free(sindices);
        }
    }
}
AliasExport(void,glDrawElementsBaseVertex,,(GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex));
AliasExport(void,glDrawElementsBaseVertex,ARB,(GLenum mode, GLsizei count, GLenum type, const void *indices, GLint basevertex));
// glDrawRangeElementsBaseVertex: indexed draw where all indices lie in
// [start, end], each offset by 'basevertex'. basevertex==0 forwards to
// gl4es_glDrawRangeElements; otherwise indices are always copied so the
// range rebase / basevertex offset can be applied in place.
void APIENTRY_GL4ES gl4es_glDrawRangeElementsBaseVertex(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex) {
    DBG(printf("glDrawRangeElementsBaseVertex(%s, %i, %i, %i, %s, @%p, %d), inlist=%i, pending=%d\n", PrintEnum(mode), start, end, count, PrintEnum(type), indices, basevertex, (glstate->list.active)?1:0, glstate->list.pending);)
    if(basevertex==0) {
        gl4es_glDrawRangeElements(mode, start, end, count, type, indices);
    } else {
        count = adjust_vertices(mode, count);
        if (count<0) {
            errorShim(GL_INVALID_VALUE);
            return;
        }
        if (count==0) {
            noerrorShim();
            return;
        }
        bool compiling = (glstate->list.active);
        bool intercept = should_intercept_render(mode);
        //BATCH Mode
        if(!compiling) {
            if(!intercept && glstate->list.pending && count>MAX_BATCH) // too large and will not intercept, stop the BATCH
                gl4es_flush();
            else if((!intercept && !glstate->list.pending && count<MIN_BATCH)
                || (intercept && globals4es.maxbatch)) {
                compiling = true;
                glstate->list.pending = 1;
                glstate->list.active = alloc_renderlist();
            }
        }
        noerrorShim();
        GLushort *sindices = NULL;
        GLuint *iindices = NULL;
        // GLuint path only when drawing directly with hardware support; the
        // compiling/intercept branches below therefore always have sindices
        if(type==GL_UNSIGNED_INT && hardext.elementuint && !compiling && !intercept)
            iindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices):indices,
                type, 1, 0, GL_UNSIGNED_INT, 1, 0, count, NULL);
        else
            sindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices):indices,
                type, 1, 0, GL_UNSIGNED_SHORT, 1, 0, count, NULL);
        if (compiling) {
            // TODO, handle uint indices
            renderlist_t *list = NULL;
            NewStage(glstate->list.active, STAGE_DRAW);
            list = glstate->list.active;
            // rebase indices to the list's vertex window, then fold basevertex
            // into the renderlist vertex range
            for (int i=0; i<count; i++) sindices[i]-=start;
            list = arrays_to_renderlist(list, mode, start + basevertex, end + basevertex + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            //end_renderlist(list);
            if(glstate->list.pending) {
                NewStage(glstate->list.active, STAGE_POSTDRAW);
            } else {
                glstate->list.active = extend_renderlist(list);
            }
            return;
        }
        if (intercept) {
            //TODO handling uint indices
            renderlist_t *list = NULL;
            for (int i=0; i<count; i++) sindices[i]-=start;
            // build, draw and free a one-shot renderlist
            list = arrays_to_renderlist(list, mode, start + basevertex, end + basevertex + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            list = end_renderlist(list);
            draw_renderlist(list);
            free_renderlist(list);
            return;
        } else {
            // shift the (copied) indices by basevertex, then draw directly;
            // end+basevertex+1 is the referenced vertex count
            if(iindices)
                for(int i=0; i<count; i++) iindices[i]+=basevertex;
            else
                for(int i=0; i<count; i++) sindices[i]+=basevertex;
            GLuint old_index = wantBufferIndex(0);
            glDrawElementsCommon(mode, 0, count, end+basevertex+1, sindices, iindices, 1);
            if(iindices)
                free(iindices);
            else
                free(sindices);
            wantBufferIndex(old_index);
        }
    }
}
AliasExport(void,glDrawRangeElementsBaseVertex,,(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex));
AliasExport(void,glDrawRangeElementsBaseVertex,ARB,(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices, GLint basevertex));
// Instanced non-indexed draw: emits `primcount` instances of `count` vertices
// starting at `first` from the bound arrays.  The call is routed through the
// gl4es renderlist machinery so display-list compilation, draw batching and
// GL_QUADS emulation (quads -> triangles via a cached index buffer) all work
// on GLES backends.
void APIENTRY_GL4ES gl4es_glDrawArraysInstanced(GLenum mode, GLint first, GLsizei count, GLsizei primcount) {
    DBG(printf("glDrawArraysInstanced(%s, %d, %d, %d), list=%p pending=%d\n", PrintEnum(mode), first, count, primcount, glstate->list.active, glstate->list.pending);)
    // Clamp count to a whole number of primitives for this mode; negative means invalid.
    count = adjust_vertices(mode, count);
    if (count<0) {
        errorShim(GL_INVALID_VALUE);
        return;
    }
    if (count==0) {
        noerrorShim();
        return;
    }
    // special case for (very) large GL_QUADS array
    if ((mode==GL_QUADS) && (count>4*8000)) {
        // split the array in manageable slice
        // NOTE(review): the slices go through gl4es_glDrawArrays, so primcount is
        // not applied on this path — confirm instancing is never combined with
        // such oversized quad arrays.
        int cnt = 4*8000;
        for (int i=0; i<count; i+=4*8000) {
            if (i+cnt>count) cnt = count-i;
            gl4es_glDrawArrays(mode, i, cnt);
        }
        return;
    }
    noerrorShim();
    bool intercept = should_intercept_render(mode);
    //BATCH Mode
    // Start a pending batch list when not compiling a display list and the draw
    // is either batch-sized or must be intercepted with maxbatch enabled.
    if (!glstate->list.compiling) {
        if((!intercept && !glstate->list.pending && (count>=MIN_BATCH && count<=MAX_BATCH))
            || (intercept && globals4es.maxbatch)) {
            glstate->list.pending = 1;
            glstate->list.active = alloc_renderlist();
        }
    }
    if (glstate->list.active) {
        // Compiling (or batching): record the draw into the active renderlist
        // instead of executing it now.
        NewStage(glstate->list.active, STAGE_DRAW);
        glstate->list.active = arrays_to_renderlist(glstate->list.active, mode, first, count+first);
        glstate->list.active->instanceCount = primcount;
        if(glstate->list.pending) {
            NewStage(glstate->list.active, STAGE_POSTDRAW);
        } else {
            glstate->list.active = extend_renderlist(glstate->list.active);
        }
        return;
    }
    /*if (glstate->polygon_mode == GL_LINE && mode>=GL_TRIANGLES)
        mode = GL_LINE_LOOP;*/
    if (glstate->polygon_mode == GL_POINT && mode>=GL_TRIANGLES)
        mode = GL_POINTS;
    if (intercept) {
        // Mode needs emulation: build a throw-away renderlist and draw it immediately.
        renderlist_t *list = NULL;
        list = arrays_to_renderlist(list, mode, first, count+first);
        list->instanceCount = primcount;
        list = end_renderlist(list);
        draw_renderlist(list);
        free_renderlist(list);
    } else {
        if (mode==GL_QUADS) {
            // TODO: move those static in glstate
            // Cached quad->triangle index buffer: each 4 quad vertices expand to
            // 6 triangle indices (factor 3/2).  Function-local statics make this
            // cache non-reentrant; see the TODO above.
            static GLushort *indices = NULL;
            static int indcnt = 0;
            static int indfirst = 0;
            int realfirst = ((first%4)==0)?0:first;
            int realcount = count + (first-realfirst);
            if((indcnt < realcount) || (indfirst!=realfirst)) {
                if(indcnt < realcount) {
                    indcnt = realcount;
                    if (indices) free(indices);
                    indices = (GLushort*)malloc(sizeof(GLushort)*(indcnt*3/2));
                }
                indfirst = realfirst;
                GLushort *p = indices;
                // Quad vertices j..j+3 -> triangles (j, j+1, j+2) and (j, j+2, j+3).
                for (int i=0, j=indfirst; i+3<indcnt; i+=4, j+=4) {
                    *(p++) = j + 0;
                    *(p++) = j + 1;
                    *(p++) = j + 2;
                    *(p++) = j + 0;
                    *(p++) = j + 2;
                    *(p++) = j + 3;
                }
            }
            // Unbind any element buffer while drawing from the client-side index
            // cache, then restore the previous binding.
            GLuint old_buffer = wantBufferIndex(0);
            glDrawElementsCommon(GL_TRIANGLES, 0, count*3/2, count, indices+(first-indfirst)*3/2, NULL, primcount);
            wantBufferIndex(old_buffer);
            return;
        }
        glDrawElementsCommon(mode, first, count, count, NULL, NULL, primcount);
    }
}
AliasExport(void,glDrawArraysInstanced,,(GLenum mode, GLint first, GLsizei count, GLsizei primcount));
AliasExport(void,glDrawArraysInstanced,ARB,(GLenum mode, GLint first, GLsizei count, GLsizei primcount));
// Instanced indexed draw.  Indices may live in a bound element buffer
// (glstate->vao->elements) or in client memory; unless they are already
// GL_UNSIGNED_SHORT (or uint is usable directly on hardware with
// hardext.elementuint), they are converted to a freshly allocated ushort
// array (need_free).  When a renderlist is being compiled/batched, ownership
// of that array transfers to the list (list->indices).
void APIENTRY_GL4ES gl4es_glDrawElementsInstanced(GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount) {
    DBG(printf("glDrawElementsInstanced(%s, %d, %s, %p, %d), list=%p pending=%d\n", PrintEnum(mode), count, PrintEnum(type), indices, primcount, glstate->list.active, glstate->list.pending);)
    count = adjust_vertices(mode, count);
    if (count<0) {
        errorShim(GL_INVALID_VALUE);
        return;
    }
    if (count==0) {
        noerrorShim();
        return;
    }
    bool compiling = (glstate->list.active);
    bool intercept = should_intercept_render(mode);
    //BATCH Mode
    if(!compiling) {
        if((!intercept && !glstate->list.pending && (count>=MIN_BATCH && count<=MAX_BATCH))
            || (intercept && globals4es.maxbatch)) {
            compiling = true;
            glstate->list.pending = 1;
            glstate->list.active = alloc_renderlist();
        }
    }
    noerrorShim();
    GLushort *sindices = NULL;
    GLuint *iindices = NULL;
    GLuint old_index = 0;
    // A conversion (and later free) is needed unless the indices are usable as-is.
    bool need_free = !(
        (type==GL_UNSIGNED_SHORT) ||
        (!compiling && !intercept && type==GL_UNSIGNED_INT && hardext.elementuint)
        );
    if(need_free) {
        sindices = copy_gl_array((glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices)):indices,
            type, 1, 0, GL_UNSIGNED_SHORT, 1, 0, count, NULL);
        // Indices are now client-side: unbind the element buffer for the draw.
        // NOTE(review): on the `compiling`/`intercept` early-return paths below,
        // old_index is never restored via wantBufferIndex — confirm whether the
        // binding is intentionally left at 0 there.
        old_index = wantBufferIndex(0);
    } else {
        if(type==GL_UNSIGNED_INT)
            iindices = (glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices)):(GLvoid*)indices;
        else
            sindices = (glstate->vao->elements)?((void*)((char*)glstate->vao->elements->data + (uintptr_t)indices)):(GLvoid*)indices;
    }
    if (compiling) {
        // TODO, handle uint indices
        renderlist_t *list = NULL;
        GLsizei min, max;
        NewStage(glstate->list.active, STAGE_DRAW);
        list = glstate->list.active;
        if(!need_free) {
            // The list takes ownership of its index array, so copy borrowed indices.
            GLushort *tmp = sindices;
            sindices = (GLushort*)malloc(count*sizeof(GLushort));
            memcpy(sindices, tmp, count*sizeof(GLushort));
        }
        // Rebase indices to 0 and record the used vertex range [min, max].
        normalize_indices_us(sindices, &max, &min, count);
        list = arrays_to_renderlist(list, mode, min, max + 1);
        list->indices = sindices;
        list->ilen = count;
        list->indice_cap = count;
        list->instanceCount = primcount;
        //end_renderlist(list);
        if(glstate->list.pending) {
            NewStage(glstate->list.active, STAGE_POSTDRAW);
        } else {
            glstate->list.active = extend_renderlist(list);
        }
        return;
    }
    if (intercept) {
        //TODO handling uint indices
        // Emulated mode: build a one-shot renderlist, draw it, free it.
        renderlist_t *list = NULL;
        GLsizei min, max;
        if(!need_free) {
            GLushort *tmp = sindices;
            sindices = (GLushort*)malloc(count*sizeof(GLushort));
            memcpy(sindices, tmp, count*sizeof(GLushort));
        }
        normalize_indices_us(sindices, &max, &min, count);
        list = arrays_to_renderlist(list, mode, min, max + 1);
        list->indices = sindices;
        list->ilen = count;
        list->indice_cap = count;
        list->instanceCount = primcount;
        list = end_renderlist(list);
        draw_renderlist(list);
        free_renderlist(list);
        return;
    } else {
        glDrawElementsCommon(mode, 0, count, 0, sindices, iindices, primcount);
        if(need_free) {
            free(sindices);
            wantBufferIndex(old_index);
        }
    }
}
AliasExport(void,glDrawElementsInstanced,,(GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount));
AliasExport(void,glDrawElementsInstanced,ARB,(GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount));
// Instanced indexed draw with a base-vertex offset added to every index.
// basevertex==0 degenerates to plain glDrawElementsInstanced.  Otherwise the
// indices are always copied (copy_gl_array), so the offset can be applied
// either to the renderlist vertex range (compiling/intercept paths) or to the
// index values themselves (direct path).
void APIENTRY_GL4ES gl4es_glDrawElementsInstancedBaseVertex(GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount, GLint basevertex) {
    DBG(printf("glDrawElementsInstanceBaseVertex(%s, %d, %s, %p, %d, %d), vtx=%p map=%p, pending=%d\n", PrintEnum(mode), count, PrintEnum(type), indices, primcount, basevertex, (glstate->vao->vertex)?glstate->vao->vertex->data:NULL, (glstate->vao->elements)?glstate->vao->elements->data:NULL, glstate->list.pending);)
    if(basevertex==0)
        gl4es_glDrawElementsInstanced(mode, count, type, indices, primcount);
    else {
        count = adjust_vertices(mode, count);
        if (count<0) {
            errorShim(GL_INVALID_VALUE);
            return;
        }
        if (count==0) {
            noerrorShim();
            return;
        }
        bool compiling = (glstate->list.active);
        bool intercept = should_intercept_render(mode);
        //BATCH Mode
        if(!compiling) {
            if(!intercept && glstate->list.pending && count>MAX_BATCH) // too large and will not intercept, stop the BATCH
                gl4es_flush();
            else if((!intercept && !glstate->list.pending && (count>=MIN_BATCH && count<=MAX_BATCH))
                || (intercept && globals4es.maxbatch)) {
                compiling = true;
                glstate->list.pending = 1;
                glstate->list.active = alloc_renderlist();
            }
        }
        noerrorShim();
        GLushort *sindices = NULL;
        GLuint *iindices = NULL;
        // Copy indices out of the element buffer / client memory.  uint indices
        // are kept as uint only when they can be drawn directly by the hardware.
        if(type==GL_UNSIGNED_INT && hardext.elementuint && !compiling && !intercept)
            iindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices):indices,
                type, 1, 0, GL_UNSIGNED_INT, 1, 0, count, NULL);
        else
            sindices = copy_gl_array((glstate->vao->elements)?(void*)((char*)glstate->vao->elements->data + (uintptr_t)indices):indices,
                type, 1, 0, GL_UNSIGNED_SHORT, 1, 0, count, NULL);
        if (compiling) {
            // TODO, handle uint indices
            renderlist_t *list = NULL;
            GLsizei min, max;
            NewStage(glstate->list.active, STAGE_DRAW);
            list = glstate->list.active;
            // Rebase indices to 0; basevertex is folded into the vertex range
            // handed to arrays_to_renderlist.  The list takes ownership of sindices.
            normalize_indices_us(sindices, &max, &min, count);
            list = arrays_to_renderlist(list, mode, min + basevertex, max + basevertex + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            list->instanceCount = primcount;
            //end_renderlist(list);
            if(glstate->list.pending) {
                NewStage(glstate->list.active, STAGE_POSTDRAW);
            } else {
                glstate->list.active = extend_renderlist(list);
            }
            return;
        }
        if (intercept) {
            //TODO handling uint indices
            renderlist_t *list = NULL;
            GLsizei min, max;
            normalize_indices_us(sindices, &max, &min, count);
            list = arrays_to_renderlist(list, mode, min + basevertex, max + basevertex + 1);
            list->indices = sindices;
            list->ilen = count;
            list->indice_cap = count;
            list->instanceCount = primcount;
            list = end_renderlist(list);
            draw_renderlist(list);
            free_renderlist(list);
            return;
        } else {
            // Direct draw: apply basevertex to the copied index values themselves.
            if(iindices)
                for(int i=0; i<count; i++) iindices[i]+=basevertex;
            else
                for(int i=0; i<count; i++) sindices[i]+=basevertex;
            GLuint old_index = wantBufferIndex(0);
            glDrawElementsCommon(mode, 0, count, 0, sindices, iindices, primcount);
            if(iindices)
                free(iindices);
            else
                free(sindices);
            wantBufferIndex(old_index);
        }
    }
}
AliasExport(void,glDrawElementsInstancedBaseVertex,,(GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount, GLint basevertex));
AliasExport(void,glDrawElementsInstancedBaseVertex,ARB,(GLenum mode, GLsizei count, GLenum type, const void *indices, GLsizei primcount, GLint basevertex));
| {'content_hash': 'eac79ed7bf7f02e5d7b6498347d6fe21', 'timestamp': '', 'source': 'github', 'line_count': 1608, 'max_line_length': 317, 'avg_line_length': 41.45646766169154, 'alnum_prop': 0.5591191383396837, 'repo_name': 'ptitSeb/gl4es', 'id': '9844ca10aa2ba82bb415437af8b68f5e7eb30d0a', 'size': '66662', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/gl/drawing.c', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '4114733'}, {'name': 'C++', 'bytes': '17255'}, {'name': 'CMake', 'bytes': '21549'}, {'name': 'Jinja', 'bytes': '8992'}, {'name': 'Makefile', 'bytes': '2200'}, {'name': 'NASL', 'bytes': '5104'}, {'name': 'Python', 'bytes': '7694'}, {'name': 'Shell', 'bytes': '5739'}]} |
/*
 * Provides methods to store and retrieve data based on Web Storage API.
 */
sap.ui.define(['jquery.sap.global'],
	function(jQuery) {
	"use strict";
	/**
	 * Check whether the current environment supports JSON.parse and JSON.stringify.
	 * @private
	 */
	var bSupportJSON = !!(window.JSON && JSON.parse && JSON.stringify);
	/**
	 * Prefix added to all storage keys (typically IDs) passed by the applications
	 * when they are calling state storage methods. The goal of such prefix is to
	 * leave space for saving data (with the same key) also scenarios other than
	 * state saving.
	 * @private
	 */
	var sStateStorageKeyPrefix = "state.key_";
	/**
	 * @interface A Storage API for JavaScript.
	 *
	 * Provides methods to store data on the client using Web Storage API support by the browser. The data
	 * received by this API must be already serialized, in string format. Similarly, the API returns the retrieved
	 * data in serialized string format, so it is the responsibility of the caller to de-serialize it, if applicable.
	 *
	 * Attention: The Web Storage API stores the data on the client. Therefore do not use this API for confidential information.
	 *
	 * One can get access to the 'default' storage by using {@link jQuery.sap.storage} directly
	 * or alternatively via factory functionality available as <code>jQuery.sap.storage(jQuery.sap.storage.Type.session)</code>
	 * returning an object implementing this interface.
	 *
	 * A typical intended usage of this API is the storage of a string representing the state of a control.
	 * In such usage, the data is stored in the browser session, and
	 * the methods to be used are {@link #put} and {@link #get}.
	 * The method {@link #remove} can be used to delete the previously saved state.
	 *
	 * In sake of completeness, the method {@link #clear} is available.
	 * However, it should be called only in very particular situations,
	 * when a global erasing of data is required. If only keys with certain prefix
	 * should be deleted the method {@link #removeAll} should be used.
	 *
	 * @author SAP SE
	 * @version 1.30.4-SNAPSHOT
	 * @since 0.11.0
	 * @public
	 * @name jQuery.sap.storage.Storage
	 */
	/**
	 *
	 * Constructor for an instance of jQuery.sap.storage.Storage
	 *
	 * @param {jQuery.sap.storage.Type | Storage} [pStorage=jQuery.sap.storage.Type.session] the type this storage should be of or an Object implementing the typical Storage API for direct usage.
	 * @param {string} [sStorageKeyPrefix='state.key_'] the prefix to use in this storage.
	 *
	 * @private
	 */
	var fnStorage = function(pStorage, sStorageKeyPrefix){
		var sType = "unknown",
			sPrefix = sStorageKeyPrefix || sStateStorageKeyPrefix;
		// Separator between the configured prefix and the user-provided key.
		sPrefix += "-";
		var sTestKey = sPrefix + "___sapui5TEST___",
			oStorage;
		if (!pStorage || typeof (pStorage) === "string") {
			sType = pStorage || "session";
			try {
				// Accessing window.localStorage/sessionStorage can itself throw
				// (e.g. blocked by browser security settings).
				oStorage = window[sType + "Storage"];
			} catch (e) {
				oStorage = null;
			}
			try { // Test for QUOTA_EXCEEDED_ERR (Happens e.g. in mobile Safari when private browsing active)
				if (oStorage) {
					oStorage.setItem(sTestKey, "1");
					oStorage.removeItem(sTestKey);
				}
			} catch (e) {
				oStorage = null;
			}
		} else if (typeof (pStorage) === "object") {
			// Custom storage object passed in directly.
			sType = pStorage.getType ? pStorage.getType() : "unknown";
			oStorage = pStorage;
		}
		var bStorageAvailable = !!oStorage;
		/**
		 * Returns whether the given storage is supported.
		 *
		 * @return {boolean} true if storage is supported, false otherwise (e.g. due to browser security settings)
		 * @public
		 * @name jQuery.sap.storage.Storage#isSupported
		 * @function
		 */
		this.isSupported = function(){
			if (!bStorageAvailable) { //No storage available at all or not accessible
				return false;
			}
			if (typeof (oStorage.isSupported) == "function") { //Possibility to define for custom storage
				return oStorage.isSupported();
			}
			return true;
		};
		/**
		 * Stores the passed state string in the session, under the key
		 * sStorageKeyPrefix + sId.
		 *
		 * sStorageKeyPrefix is the id prefix defined for the storage instance (@see jQuery.sap#storage)
		 *
		 * @param {string} sId Id for the state to store
		 * @param {string} sStateToStore content to store
		 * @return {boolean} true if the data were successfully stored, false otherwise
		 * @public
		 * @name jQuery.sap.storage.Storage#put
		 * @function
		 */
		this.put = function(sId, sStateToStore) {
			//precondition: non-empty sId and available storage feature
			jQuery.sap.assert(typeof sId === "string" && sId, "sId must be a non-empty string");
			jQuery.sap.assert(typeof sStateToStore === "string" || bSupportJSON, "sStateToStore must be string or JSON must be supported");
			if (this.isSupported() && sId) {
				try {
					// setItem may throw (quota exceeded, private browsing) -> report false.
					oStorage.setItem(sPrefix + sId, bSupportJSON ? JSON.stringify(sStateToStore) : sStateToStore);
					return true;
				} catch (e) {
					return false;
				}
			} else {
				return false;
			}
		};
		/**
		 * Retrieves the state string stored in the session under the key
		 * sStorageKeyPrefix + sId.
		 *
		 * sStorageKeyPrefix is the id prefix defined for the storage instance (@see jQuery.sap#storage)
		 *
		 * @param {string} sId Id for the state to retrieve
		 * @return {string} the string from the storage, if the retrieval
		 * was successful, and null otherwise
		 * @public
		 * @name jQuery.sap.storage.Storage#get
		 * @function
		 */
		this.get = function(sId) {
			//precondition: non-empty sId and available storage feature
			jQuery.sap.assert(typeof sId === "string" && sId, "sId must be a non-empty string");
			if (this.isSupported() && sId ) {
				try {
					var sItem = oStorage.getItem(sPrefix + sId);
					// Values written by put() are JSON-encoded when JSON is supported.
					return bSupportJSON ? JSON.parse(sItem) : sItem;
				} catch (e) {
					return null;
				}
			} else {
				return null;
			}
		};
		/**
		 * Deletes the state string stored in the session under the key
		 * sStorageKeyPrefix + sId.
		 *
		 * sStorageKeyPrefix is the id prefix defined for the storage instance (@see jQuery.sap#storage)
		 *
		 * @param {string} sId Id for the state to delete
		 * @return {boolean} true if the deletion
		 * was successful or the data doesn't exist under the specified key,
		 * and false if the feature is unavailable or a problem occurred
		 * @public
		 * @name jQuery.sap.storage.Storage#remove
		 * @function
		 */
		this.remove = function(sId) {
			//precondition: non-empty sId and available storage feature
			jQuery.sap.assert(typeof sId === "string" && sId, "sId must be a non-empty string");
			if (this.isSupported() && sId) {
				try {
					oStorage.removeItem(sPrefix + sId);
					return true;
				} catch (e) {
					return false;
				}
			} else {
				return false;
			}
		};
		/**
		 * Deletes all state strings stored in the session under the key prefix
		 * sStorageKeyPrefix + sIdPrefix.
		 *
		 * sStorageKeyPrefix is the id prefix defined for the storage instance (@see jQuery.sap#storage)
		 *
		 * @param {string} sIdPrefix Id prefix for the states to delete
		 * @return {boolean} true if the deletion
		 * was successful or the data doesn't exist under the specified key,
		 * and false if the feature is unavailable or a problem occurred
		 * @since 1.13.0
		 * @public
		 * @name jQuery.sap.storage.Storage#removeAll
		 * @function
		 */
		this.removeAll = function(sIdPrefix) {
			//precondition: available storage feature (in case of IE8 typeof native functions returns "object")
			if (this.isSupported() && oStorage.length && (document.addEventListener ? /function/ : /function|object/).test(typeof (oStorage.key))) {
				try {
					var len = oStorage.length;
					// Collect matching keys first: removing while iterating would
					// shift the storage's key indices.
					var aKeysToRemove = [];
					var key, i;
					var p = sPrefix + (sIdPrefix || "");
					for (i = 0; i < len; i++) {
						key = oStorage.key(i);
						if (key && key.indexOf(p) == 0) {
							aKeysToRemove.push(key);
						}
					}
					for (i = 0; i < aKeysToRemove.length; i++) {
						oStorage.removeItem(aKeysToRemove[i]);
					}
					return true;
				} catch (e) {
					return false;
				}
			} else {
				return false;
			}
		};
		/**
		 * Deletes all the entries saved in the session (Independent of the current Storage instance!).
		 *
		 * CAUTION: This method should be called only in very particular situations,
		 * when a global erasing of data is required. Given that the method deletes
		 * the data saved under any ID, it should not be called when managing data
		 * for specific controls.
		 *
		 * @return {boolean} true if execution of removal
		 * was successful or the data to remove doesn't exist,
		 * and false if the feature is unavailable or a problem occurred
		 * @public
		 * @name jQuery.sap.storage.Storage#clear
		 * @function
		 */
		this.clear = function() {
			//precondition: available storage feature
			if (this.isSupported()) {
				try {
					oStorage.clear();
					return true;
				} catch (e) {
					return false;
				}
			} else {
				return false;
			}
		};
		/**
		 * Returns the type of the storage.
		 * @returns {jQuery.sap.storage.Type | string} the type of the storage or "unknown"
		 * @public
		 * @name jQuery.sap.storage.Storage#getType
		 * @function
		 */
		this.getType = function(){
			return sType;
		};
	};
	/**
	 * A map holding instances of different 'standard' storages.
	 * Used to limit number of created storage objects.
	 * @private
	 */
	var mStorages = {};
	/**
	 * Returns a {@link jQuery.sap.storage.Storage Storage} object for a given HTML5 storage (type) and,
	 * as a convenience, provides static functions to access the default (session) storage.
	 *
	 * When called as a function, it returns an instance of {@link jQuery.sap.storage.Storage}, providing access
	 * to the storage of the given {@link jQuery.sap.storage.Type} or to the given HTML5 Storage object.
	 *
	 * The default session storage can be easily accessed with methods {@link jQuery.sap.storage.get},
	 * {@link jQuery.sap.storage.put}, {@link jQuery.sap.storage.remove}, {@link jQuery.sap.storage.clear},
	 * {@link jQuery.sap.storage.getType} and {@link jQuery.sap.storage.removeAll}
	 *
	 * @param {jQuery.sap.storage.Type | Storage}
	 *     oStorage the type specifying the storage to use or an object implementing the browser's Storage API.
	 * @param {string} [sIdPrefix] Prefix used for the Ids. If not set a default prefix is used.
	 * @returns {jQuery.sap.storage.Storage}
	 *
	 * @version 1.30.4-SNAPSHOT
	 * @since 0.11.0
	 * @namespace
	 * @type Function
	 * @public
	 *
	 * @borrows jQuery.sap.storage.Storage#get as get
	 * @borrows jQuery.sap.storage.Storage#put as put
	 * @borrows jQuery.sap.storage.Storage#remove as remove
	 * @borrows jQuery.sap.storage.Storage#clear as clear
	 * @borrows jQuery.sap.storage.Storage#getType as getType
	 * @borrows jQuery.sap.storage.Storage#removeAll as removeAll
	 * @borrows jQuery.sap.storage.Storage#isSupported as isSupported
	 */
	jQuery.sap.storage = function(oStorage, sIdPrefix){
		// if nothing or the default was passed in, simply return ourself
		if (!oStorage) {
			oStorage = jQuery.sap.storage.Type.session;
		}
		if (typeof (oStorage) === "string" && jQuery.sap.storage.Type[oStorage]) {
			// Standard storage types are cached per (type, prefix) pair.
			var sKey = oStorage;
			if (sIdPrefix && sIdPrefix != sStateStorageKeyPrefix) {
				sKey = oStorage + "_" + sIdPrefix;
			}
			return mStorages[sKey] || (mStorages[sKey] = new fnStorage(oStorage, sIdPrefix));
		}
		// OK, tough but probably good for issue identification. As something was passed in, let's at least ensure our used API is fulfilled.
		jQuery.sap.assert(oStorage instanceof Object && oStorage.clear && oStorage.setItem && oStorage.getItem && oStorage.removeItem, "storage: duck typing the storage");
		return new fnStorage(oStorage, sIdPrefix);
	};
	/**
	 * Enumeration of the storage types supported by {@link jQuery.sap.storage.Storage}
	 * @class
	 * @static
	 * @public
	 * @version 1.30.4-SNAPSHOT
	 * @since 0.11.0
	 */
	jQuery.sap.storage.Type = {
		/**
		 * Indicates usage of the browser's localStorage feature
		 * @public
		 */
		local: "local",
		/**
		 * Indicates usage of the browser's sessionStorage feature
		 * @public
		 */
		session: "session",
		/**
		 * Indicates usage of the browser's globalStorage feature
		 * @public
		 */
		global: "global"
	};
	// ensure the storage constructor applied to our storage object
	fnStorage.apply(jQuery.sap.storage);
	mStorages[jQuery.sap.storage.Type.session] = jQuery.sap.storage;
	return jQuery;
});
| {'content_hash': '02d310ddece3817a52d37d9f30268e90', 'timestamp': '', 'source': 'github', 'line_count': 372, 'max_line_length': 192, 'avg_line_length': 33.586021505376344, 'alnum_prop': 0.672082599647831, 'repo_name': 'fconFGDCA/DetailCADA', 'id': '4a3dcdcbb8e4b42d9d75e5e10198ba25922c87d3', 'size': '12690', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'resources/jquery.sap.storage-dbg.js', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '3946795'}, {'name': 'HTML', 'bytes': '2428'}, {'name': 'JavaScript', 'bytes': '26395873'}]} |
(function ($) {
    // English (en_US) text defaults for bootstrap-select.
    var translations = {
        noneSelectedText: 'Nothing selected',
        noneResultsText: 'No results match {0}',
        // Summary shown in the button for multi-selects.
        countSelectedText: function (numSelected, numTotal) {
            return (numSelected == 1) ? "{0} item selected" : "{0} items selected";
        },
        // [overall limit message, per-group limit message]
        maxOptionsText: function (numAll, numGroup) {
            return [
                (numAll == 1) ? 'Limit reached ({n} item max)' : 'Limit reached ({n} items max)',
                (numGroup == 1) ? 'Group limit reached ({n} item max)' : 'Group limit reached ({n} items max)'
            ];
        },
        selectAllText: 'Select All',
        deselectAllText: 'Deselect All',
        multipleSeparator: ', '
    };
    $.fn.selectpicker.defaults = translations;
}(jQuery));
| {'content_hash': '39362c8a87140bcf8bb0e3100ae0b8f8', 'timestamp': '', 'source': 'github', 'line_count': 21, 'max_line_length': 110, 'avg_line_length': 33.333333333333336, 'alnum_prop': 0.5971428571428572, 'repo_name': 'tonyseek/bootstrap-select', 'id': '10d24a2d150956e0348565853f0c9489a389e678', 'size': '919', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'dist/js/i18n/defaults-en_US.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '5977'}, {'name': 'JavaScript', 'bytes': '27140'}]} |
const electron = require('electron');
// Module to control application life.
const app = electron.app;
// Module to create native browser window.
const BrowserWindow = electron.BrowserWindow;
const path = require('path');
const url = require('url');
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
let mainWindow;
// Creates the single main BrowserWindow and loads the renderer entry point.
// In development the window points at the local webpack dev server; in any
// other NODE_ENV it loads the bundled index.html sitting next to this file.
function createWindow () {
  // Create the browser window.
  mainWindow = new BrowserWindow({width: 800, height: 600});
  // Options for url.format(), filled per environment below.
  const options = {};
  switch (process.env.NODE_ENV) {
    case 'development': {
      // NOTE(review): assumes ./rikoconfig exports a factory whose
      // setWebpackConfig('electron') returns { webpackConfig } with a
      // devServer section — confirm against rikoconfig.js.
      const rikoconfig = require('./rikoconfig')();
      const { webpackConfig } = rikoconfig.setWebpackConfig('electron');
      const SERVER_PORT = webpackConfig.devServer.port || 3000;
      options.host = `localhost:${SERVER_PORT}`;
      options.protocol = 'http:';
      options.slashes = true;
      break;
    }
    default: {
      // Production (and any other env): serve the static bundle from disk.
      options.pathname = path.join(__dirname, 'index.html');
      options.protocol = 'file:';
      options.slashes = true;
      break;
    }
  }
  // and load the index.html of the app.
  mainWindow.loadURL(url.format(options));
  if(process.env.NODE_ENV === 'development') {
    // Open the DevTools.
    mainWindow.webContents.openDevTools();
  }
  // Emitted when the window is closed.
  mainWindow.on('closed', function () {
    // Dereference the window object, usually you would store windows
    // in an array if your app supports multi windows, this is the time
    // when you should delete the corresponding element.
    mainWindow = null;
  });
}
// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on('ready', createWindow);
// Quit when all windows are closed.
app.on('window-all-closed', function () {
// On OS X it is common for applications and their menu bar
// to stay active until the user quits explicitly with Cmd + Q
if (process.platform !== 'darwin') {
app.quit();
}
});
app.on('activate', function () {
// On OS X it's common to re-create a window in the app when the
// dock icon is clicked and there are no other windows open.
if (mainWindow === null) {
createWindow();
}
});
// In this file you can include the rest of your app's specific main process
// code. You can also put them in separate files and require them here. | {'content_hash': 'a149965e3dc010c49dd5f802ba49fea1', 'timestamp': '', 'source': 'github', 'line_count': 79, 'max_line_length': 78, 'avg_line_length': 31.443037974683545, 'alnum_prop': 0.677133655394525, 'repo_name': 'Donmclean/riko', 'id': '9fec4c99d64e18ad3a671c1df5a86a50d5081a21', 'size': '2484', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'bin/_setup/electron/electron.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '774'}, {'name': 'HTML', 'bytes': '3533'}, {'name': 'JavaScript', 'bytes': '91944'}, {'name': 'Shell', 'bytes': '4229'}]} |
namespace android.view.inputmethod
{
[global::MonoJavaBridge.JavaClass()]
public sealed partial class CompletionInfo : java.lang.Object, android.os.Parcelable
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
static CompletionInfo()
{
InitJNI();
}
internal CompletionInfo(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
internal static global::MonoJavaBridge.MethodId _toString10102;
public sealed override global::java.lang.String toString()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo._toString10102)) as java.lang.String;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._toString10102)) as java.lang.String;
}
internal static global::MonoJavaBridge.MethodId _getId10103;
public long getId()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return @__env.CallLongMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo._getId10103);
else
return @__env.CallNonVirtualLongMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._getId10103);
}
internal static global::MonoJavaBridge.MethodId _getPosition10104;
public int getPosition()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return @__env.CallIntMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo._getPosition10104);
else
return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._getPosition10104);
}
internal static global::MonoJavaBridge.MethodId _getText10105;
public global::java.lang.CharSequence getText()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapIJavaObject<global::java.lang.CharSequence>(@__env.CallObjectMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo._getText10105)) as java.lang.CharSequence;
else
return global::MonoJavaBridge.JavaBridge.WrapIJavaObject<global::java.lang.CharSequence>(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._getText10105)) as java.lang.CharSequence;
}
internal static global::MonoJavaBridge.MethodId _writeToParcel10106;
public void writeToParcel(android.os.Parcel arg0, int arg1)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
@__env.CallVoidMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo._writeToParcel10106, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1));
else
@__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._writeToParcel10106, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1));
}
internal static global::MonoJavaBridge.MethodId _describeContents10107;
public int describeContents()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return @__env.CallIntMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo._describeContents10107);
else
return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._describeContents10107);
}
internal static global::MonoJavaBridge.MethodId _getLabel10108;
public global::java.lang.CharSequence getLabel()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapIJavaObject<global::java.lang.CharSequence>(@__env.CallObjectMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo._getLabel10108)) as java.lang.CharSequence;
else
return global::MonoJavaBridge.JavaBridge.WrapIJavaObject<global::java.lang.CharSequence>(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._getLabel10108)) as java.lang.CharSequence;
}
internal static global::MonoJavaBridge.MethodId _CompletionInfo10109;
public CompletionInfo(long arg0, int arg1, java.lang.CharSequence arg2) : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._CompletionInfo10109, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2));
Init(@__env, handle);
}
internal static global::MonoJavaBridge.MethodId _CompletionInfo10110;
public CompletionInfo(long arg0, int arg1, java.lang.CharSequence arg2, java.lang.CharSequence arg3) : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.view.inputmethod.CompletionInfo.staticClass, global::android.view.inputmethod.CompletionInfo._CompletionInfo10110, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3));
Init(@__env, handle);
}
internal static global::MonoJavaBridge.FieldId _CREATOR10111;
public static global::android.os.Parcelable_Creator CREATOR
{
get
{
return default(global::android.os.Parcelable_Creator);
}
}
/// <summary>
/// Resolves and caches the JNI class reference and the method ids used by
/// this wrapper. Lookups use GetMethodIDNoThrow, so a method missing on the
/// device leaves the corresponding id unset instead of throwing here.
/// </summary>
private static void InitJNI()
{
    global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
    // Pin the class with a global ref so it stays valid beyond this JNI frame.
    global::android.view.inputmethod.CompletionInfo.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/view/inputmethod/CompletionInfo"));
    // Instance methods.
    global::android.view.inputmethod.CompletionInfo._toString10102 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "toString", "()Ljava/lang/String;");
    global::android.view.inputmethod.CompletionInfo._getId10103 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "getId", "()J");
    global::android.view.inputmethod.CompletionInfo._getPosition10104 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "getPosition", "()I");
    global::android.view.inputmethod.CompletionInfo._getText10105 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "getText", "()Ljava/lang/CharSequence;");
    global::android.view.inputmethod.CompletionInfo._writeToParcel10106 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "writeToParcel", "(Landroid/os/Parcel;I)V");
    global::android.view.inputmethod.CompletionInfo._describeContents10107 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "describeContents", "()I");
    global::android.view.inputmethod.CompletionInfo._getLabel10108 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "getLabel", "()Ljava/lang/CharSequence;");
    // Constructors.
    global::android.view.inputmethod.CompletionInfo._CompletionInfo10109 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "<init>", "(JILjava/lang/CharSequence;)V");
    global::android.view.inputmethod.CompletionInfo._CompletionInfo10110 = @__env.GetMethodIDNoThrow(global::android.view.inputmethod.CompletionInfo.staticClass, "<init>", "(JILjava/lang/CharSequence;Ljava/lang/CharSequence;)V");
}
}
}
| {'content_hash': '8f0f227c709885dbb39417df6dfcb9b9', 'timestamp': '', 'source': 'github', 'line_count': 114, 'max_line_length': 414, 'avg_line_length': 75.33333333333333, 'alnum_prop': 0.7934326967862133, 'repo_name': 'koush/androidmono', 'id': '98aca98957ceb82ac406a75d9f3acee4bb3a777b', 'size': '8588', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'jni/MonoJavaBridge/android/generated/android/view/inputmethod/CompletionInfo.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '147942'}, {'name': 'C#', 'bytes': '14843624'}, {'name': 'C++', 'bytes': '1162'}, {'name': 'Java', 'bytes': '34681'}, {'name': 'Shell', 'bytes': '3576'}]} |
import uuid
import ddt
import mock
from oslo_config import cfg
from rally import exceptions
from rally.plugins.openstack import service
from rally.plugins.openstack.services.storage import block
from rally.plugins.openstack.services.storage import cinder_common
from tests.unit import fakes
from tests.unit import test
BASE_PATH = "rally.plugins.openstack.services.storage"
CONF = cfg.CONF
class FullCinder(service.Service, cinder_common.CinderMixin):
    """Implementation of CinderMixin with Service base class.

    Minimal concrete class: the mixin under test needs the Service
    plumbing (clients, name generator) supplied by the base class.
    """
    pass
@ddt.ddt
class CinderMixinTestCase(test.ScenarioTestCase):
    """Unit tests for CinderMixin, driven through the FullCinder stub.

    The cinder client is a MagicMock, so each test checks both the value a
    mixin helper returns and the exact client call it makes.
    """

    def setUp(self):
        super(CinderMixinTestCase, self).setUp()
        self.clients = mock.MagicMock()
        self.cinder = self.clients.cinder.return_value
        # uuid1 stands in for the scenario's random-name generator.
        self.name_generator = uuid.uuid1
        self.version = "some"
        self.service = FullCinder(
            clients=self.clients, name_generator=self.name_generator)
        self.service.version = self.version

    def atomic_actions(self):
        return self.service._atomic_actions

    def test__get_client(self):
        self.assertEqual(self.cinder,
                         self.service._get_client())

    def test__update_resource_with_manage(self):
        resource = mock.MagicMock(id=1, manager=mock.MagicMock())
        self.assertEqual(resource.manager.get.return_value,
                         self.service._update_resource(resource))
        resource.manager.get.assert_called_once_with(
            resource.id)

    @ddt.data({"resource": block.Volume(id=1, name="vol",
                                        size=1, status="st"),
               "attr": "volumes"},
              {"resource": block.VolumeSnapshot(id=2, name="snapshot",
                                                volume_id=1, status="st"),
               "attr": "volume_snapshots"},
              {"resource": block.VolumeBackup(id=3, name="backup",
                                              volume_id=1, status="st"),
               "attr": "backups"})
    @ddt.unpack
    def test__update_resource_with_no_manage(self, resource, attr):
        # Resources without a bound manager are refreshed via the matching
        # client collection (volumes / volume_snapshots / backups).
        self.assertEqual(getattr(self.cinder, attr).get.return_value,
                         self.service._update_resource(resource))
        getattr(self.cinder, attr).get.assert_called_once_with(
            resource.id)

    def test__update_resource_with_not_found(self):
        manager = mock.MagicMock()
        resource = fakes.FakeResource(manager=manager, status="ERROR")

        # http_status 404 must be translated to GetResourceNotFound.
        class NotFoundException(Exception):
            http_status = 404

        manager.get = mock.MagicMock(side_effect=NotFoundException)
        self.assertRaises(exceptions.GetResourceNotFound,
                          self.service._update_resource, resource)

    def test__update_resource_with_http_exception(self):
        manager = mock.MagicMock()
        resource = fakes.FakeResource(manager=manager, status="ERROR")

        class HTTPException(Exception):
            pass

        manager.get = mock.MagicMock(side_effect=HTTPException)
        self.assertRaises(exceptions.GetResourceFailure,
                          self.service._update_resource, resource)

    def test__wait_available_volume(self):
        volume = fakes.FakeVolume()
        self.assertEqual(self.mock_wait_for_status.mock.return_value,
                         self.service._wait_available_volume(volume))
        self.mock_wait_for_status.mock.assert_called_once_with(
            volume,
            ready_statuses=["available"],
            update_resource=self.service._update_resource,
            timeout=CONF.benchmark.cinder_volume_create_timeout,
            check_interval=CONF.benchmark.cinder_volume_create_poll_interval
        )

    def test_list_volumes(self):
        self.assertEqual(self.cinder.volumes.list.return_value,
                         self.service.list_volumes())
        self.cinder.volumes.list.assert_called_once_with(True)

    def test_get_volume(self):
        self.assertEqual(self.cinder.volumes.get.return_value,
                         self.service.get_volume(1))
        self.cinder.volumes.get.assert_called_once_with(1)

    @mock.patch("%s.block.BlockStorage.create_volume" % BASE_PATH)
    def test_delete_volume(self, mock_create_volume):
        volume = mock_create_volume.return_value
        self.service.delete_volume(volume)
        self.cinder.volumes.delete.assert_called_once_with(volume)
        self.mock_wait_for_status.mock.assert_called_once_with(
            volume,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.service._update_resource,
            timeout=CONF.benchmark.cinder_volume_delete_timeout,
            check_interval=CONF.benchmark.cinder_volume_delete_poll_interval
        )

    @mock.patch("%s.block.BlockStorage.create_volume" % BASE_PATH)
    def test_extend_volume(self, mock_create_volume):
        volume = mock_create_volume.return_value
        self.service._wait_available_volume = mock.MagicMock()
        self.service._wait_available_volume.return_value = fakes.FakeVolume()
        self.assertEqual(self.service._wait_available_volume.return_value,
                         self.service.extend_volume(volume, 1))
        self.cinder.volumes.extend.assert_called_once_with(volume, 1)
        self.service._wait_available_volume.assert_called_once_with(volume)

    def test_list_snapshots(self):
        self.assertEqual(self.cinder.volume_snapshots.list.return_value,
                         self.service.list_snapshots())
        self.cinder.volume_snapshots.list.assert_called_once_with(True)

    def test_set_metadata(self):
        volume = fakes.FakeVolume()
        self.service.set_metadata(volume, sets=2, set_size=4)
        calls = self.cinder.volumes.set_metadata.call_args_list
        # sets=2 -> exactly two client calls, each with set_size=4 keys.
        self.assertEqual(2, len(calls))
        for call in calls:
            call_volume, metadata = call[0]
            self.assertEqual(volume, call_volume)
            self.assertEqual(4, len(metadata))

    def test_delete_metadata(self):
        volume = fakes.FakeVolume()
        keys = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"]
        self.service.delete_metadata(volume, keys, deletes=3, delete_size=4)
        calls = self.cinder.volumes.delete_metadata.call_args_list
        self.assertEqual(3, len(calls))
        # Each batch must draw distinct keys from the provided pool.
        all_deleted = []
        for call in calls:
            call_volume, del_keys = call[0]
            self.assertEqual(volume, call_volume)
            self.assertEqual(4, len(del_keys))
            for key in del_keys:
                self.assertIn(key, keys)
                self.assertNotIn(key, all_deleted)
                all_deleted.append(key)

    def test_delete_metadata_not_enough_keys(self):
        volume = fakes.FakeVolume()
        # 5 keys cannot satisfy deletes=2 * delete_size=3 (needs 6).
        keys = ["a", "b", "c", "d", "e"]
        self.assertRaises(exceptions.InvalidArgumentsException,
                          self.service.delete_metadata,
                          volume, keys, deletes=2, delete_size=3)

    def test_update_readonly_flag(self):
        fake_volume = mock.MagicMock()
        self.service.update_readonly_flag(fake_volume, "fake_flag")
        self.cinder.volumes.update_readonly_flag.assert_called_once_with(
            fake_volume, "fake_flag")

    @mock.patch("rally.plugins.openstack.services.image.image.Image")
    def test_upload_volume_to_image(self, mock_image):
        volume = mock.Mock()
        image = {"os-volume_upload_image": {"image_id": 1}}
        self.cinder.volumes.upload_to_image.return_value = (None, image)
        glance = mock_image.return_value
        self.service.generate_random_name = mock.Mock(
            return_value="test_vol")
        self.service.upload_volume_to_image(volume, False,
                                            "container", "disk")
        self.cinder.volumes.upload_to_image.assert_called_once_with(
            volume, False, "test_vol", "container", "disk")
        # Two waits: first for the volume to return to "available", then
        # for the new glance image to become "active".
        self.mock_wait_for_status.mock.assert_has_calls([
            mock.call(
                volume,
                ready_statuses=["available"],
                update_resource=self.service._update_resource,
                timeout=CONF.benchmark.cinder_volume_create_timeout,
                check_interval=CONF.benchmark.
                cinder_volume_create_poll_interval),
            mock.call(
                glance.get_image.return_value,
                ready_statuses=["active"],
                update_resource=glance.get_image,
                timeout=CONF.benchmark.glance_image_create_timeout,
                check_interval=CONF.benchmark.
                glance_image_create_poll_interval)
        ])
        glance.get_image.assert_called_once_with(1)

    def test_create_qos(self):
        specs = {"consumer": "both",
                 "write_iops_sec": "10",
                 "read_iops_sec": "1000"}
        random_name = "random_name"
        self.service.generate_random_name = mock.MagicMock(
            return_value=random_name)
        result = self.service.create_qos(specs)
        self.assertEqual(
            self.cinder.qos_specs.create.return_value,
            result
        )
        self.cinder.qos_specs.create.assert_called_once_with(random_name,
                                                             specs)

    def test_list_qos(self):
        result = self.service.list_qos(True)
        self.assertEqual(
            self.cinder.qos_specs.list.return_value,
            result
        )
        self.cinder.qos_specs.list.assert_called_once_with(True)

    def test_get_qos(self):
        result = self.service.get_qos("qos")
        self.assertEqual(
            self.cinder.qos_specs.get.return_value,
            result)
        self.cinder.qos_specs.get.assert_called_once_with("qos")

    def test_set_qos(self):
        set_specs_args = {"test": "foo"}
        result = self.service.set_qos("qos", set_specs_args)
        self.assertEqual(
            self.cinder.qos_specs.set_keys.return_value,
            result)
        self.cinder.qos_specs.set_keys.assert_called_once_with("qos",
                                                               set_specs_args)

    def test_qos_associate_type(self):
        self.service.qos_associate_type("qos", "type_id")
        self.cinder.qos_specs.associate.assert_called_once_with(
            "qos", "type_id")

    def test_qos_disassociate_type(self):
        self.service.qos_disassociate_type("qos", "type_id")
        self.cinder.qos_specs.disassociate.assert_called_once_with(
            "qos", "type_id")

    def test_delete_snapshot(self):
        snapshot = mock.Mock()
        self.service.delete_snapshot(snapshot)
        self.cinder.volume_snapshots.delete.assert_called_once_with(snapshot)
        # NOTE(review): the wait is asserted against the *create* timeout
        # options; this mirrors the current CinderMixin behaviour — confirm
        # there before "fixing" either side.
        self.mock_wait_for_status.mock.assert_called_once_with(
            snapshot,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.service._update_resource,
            timeout=cfg.CONF.benchmark.cinder_volume_create_timeout,
            check_interval=cfg.CONF.benchmark
            .cinder_volume_create_poll_interval)

    def test_delete_backup(self):
        backup = mock.Mock()
        self.service.delete_backup(backup)
        self.cinder.backups.delete.assert_called_once_with(backup)
        # NOTE(review): create timeouts again — see test_delete_snapshot.
        self.mock_wait_for_status.mock.assert_called_once_with(
            backup,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.service._update_resource,
            timeout=cfg.CONF.benchmark.cinder_volume_create_timeout,
            check_interval=cfg.CONF.benchmark
            .cinder_volume_create_poll_interval)

    def test_restore_backup(self):
        backup = mock.Mock()
        self.service._wait_available_volume = mock.MagicMock()
        self.service._wait_available_volume.return_value = mock.Mock()
        return_restore = self.service.restore_backup(backup.id, None)
        self.cinder.restores.restore.assert_called_once_with(backup.id, None)
        # The restored volume is re-fetched and awaited before being returned.
        self.cinder.volumes.get.assert_called_once_with(
            self.cinder.restores.restore.return_value.volume_id)
        self.service._wait_available_volume.assert_called_once_with(
            self.cinder.volumes.get.return_value)
        self.assertEqual(self.service._wait_available_volume.return_value,
                         return_restore)

    def test_list_backups(self):
        return_backups_list = self.service.list_backups()
        self.assertEqual(
            self.cinder.backups.list.return_value,
            return_backups_list)

    def test_list_transfers(self):
        return_transfers_list = self.service.list_transfers()
        self.assertEqual(
            self.cinder.transfers.list.return_value,
            return_transfers_list)

    def test_get_volume_type(self):
        self.assertEqual(self.cinder.volume_types.get.return_value,
                         self.service.get_volume_type("volume_type"))
        self.cinder.volume_types.get.assert_called_once_with(
            "volume_type")

    def test_delete_volume_type(self):
        volume_type = mock.Mock()
        self.service.delete_volume_type(volume_type)
        self.cinder.volume_types.delete.assert_called_once_with(
            volume_type)

    def test_set_volume_type_keys(self):
        volume_type = mock.Mock()
        self.assertEqual(volume_type.set_keys.return_value,
                         self.service.set_volume_type_keys(
                             volume_type, metadata="metadata"))
        volume_type.set_keys.assert_called_once_with("metadata")

    def test_transfer_create(self):
        fake_volume = mock.MagicMock()
        random_name = "random_name"
        self.service.generate_random_name = mock.MagicMock(
            return_value=random_name)
        result = self.service.transfer_create(fake_volume.id)
        self.assertEqual(
            self.cinder.transfers.create.return_value,
            result)
        self.cinder.transfers.create.assert_called_once_with(
            fake_volume.id, name=random_name)

    def test_transfer_create_with_name(self):
        fake_volume = mock.MagicMock()
        result = self.service.transfer_create(fake_volume.id, name="t")
        self.assertEqual(
            self.cinder.transfers.create.return_value,
            result)
        self.cinder.transfers.create.assert_called_once_with(
            fake_volume.id, name="t")

    def test_transfer_accept(self):
        fake_transfer = mock.MagicMock()
        result = self.service.transfer_accept(fake_transfer.id, "fake_key")
        self.assertEqual(
            self.cinder.transfers.accept.return_value,
            result)
        self.cinder.transfers.accept.assert_called_once_with(
            fake_transfer.id, "fake_key")

    def test_create_encryption_type(self):
        volume_type = mock.Mock()
        specs = {
            "provider": "foo_pro",
            "cipher": "foo_cip",
            "key_size": 512,
            "control_location": "foo_con"
        }
        result = self.service.create_encryption_type(volume_type, specs)
        self.assertEqual(
            self.cinder.volume_encryption_types.create.return_value, result)
        self.cinder.volume_encryption_types.create.assert_called_once_with(
            volume_type, specs)

    def test_get_encryption_type(self):
        volume_type = mock.Mock()
        result = self.service.get_encryption_type(volume_type)
        self.assertEqual(
            self.cinder.volume_encryption_types.get.return_value, result)
        self.cinder.volume_encryption_types.get.assert_called_once_with(
            volume_type)

    def test_list_encryption_type(self):
        return_encryption_types_list = self.service.list_encryption_type()
        self.assertEqual(self.cinder.volume_encryption_types.list.return_value,
                         return_encryption_types_list)

    def test_delete_encryption_type(self):
        resp = mock.MagicMock(status_code=202)
        self.cinder.volume_encryption_types.delete.return_value = [resp]
        self.service.delete_encryption_type("type")
        self.cinder.volume_encryption_types.delete.assert_called_once_with(
            "type")

    def test_delete_encryption_type_raise(self):
        # A non-202 response must surface as a RallyException.
        resp = mock.MagicMock(status_code=404)
        self.cinder.volume_encryption_types.delete.return_value = [resp]
        self.assertRaises(exceptions.RallyException,
                          self.service.delete_encryption_type, "type")
        self.cinder.volume_encryption_types.delete.assert_called_once_with(
            "type")

    def test_update_encryption_type(self):
        volume_type = mock.Mock()
        specs = {
            "provider": "foo_pro",
            "cipher": "foo_cip",
            "key_size": 512,
            "control_location": "foo_con"
        }
        result = self.service.update_encryption_type(volume_type, specs)
        self.assertEqual(
            self.cinder.volume_encryption_types.update.return_value, result)
        self.cinder.volume_encryption_types.update.assert_called_once_with(
            volume_type, specs)
class FullUnifiedCinder(cinder_common.UnifiedCinderMixin,
                        service.Service):
    """Implementation of UnifiedCinderMixin with Service base class.

    Minimal concrete class used to exercise the unified mixin's
    delegation logic against a mocked ``_impl``.
    """
    pass
class UnifiedCinderMixinTestCase(test.TestCase):
    """Unit tests for UnifiedCinderMixin.

    The version-specific implementation is replaced with a MagicMock
    (``self.impl``), so these tests only verify delegation to ``_impl``
    and unification of the returned objects.
    """

    def setUp(self):
        super(UnifiedCinderMixinTestCase, self).setUp()
        self.clients = mock.MagicMock()
        self.name_generator = mock.MagicMock()
        self.impl = mock.MagicMock()
        self.version = "some"
        self.service = FullUnifiedCinder(
            clients=self.clients, name_generator=self.name_generator)
        self.service._impl = self.impl
        self.service.version = self.version

    def test__unify_backup(self):
        class SomeBackup(object):
            id = 1
            name = "backup"
            volume_id = "volume"
            status = "st"
        backup = self.service._unify_backup(SomeBackup())
        self.assertEqual(1, backup.id)
        self.assertEqual("backup", backup.name)
        self.assertEqual("volume", backup.volume_id)
        self.assertEqual("st", backup.status)

    def test__unify_transfer(self):
        class SomeTransfer(object):
            id = 1
            name = "transfer"
            volume_id = "volume"
            status = "st"
        # Fixed copy-paste bug: this test previously called _unify_backup,
        # leaving _unify_transfer completely untested.
        transfer = self.service._unify_transfer(SomeTransfer())
        self.assertEqual(1, transfer.id)
        self.assertEqual("transfer", transfer.name)
        self.assertEqual("volume", transfer.volume_id)
        self.assertEqual("st", transfer.status)

    def test__unify_qos(self):
        class Qos(object):
            id = 1
            name = "qos"
            specs = {"key1": "value1"}
        qos = self.service._unify_qos(Qos())
        self.assertEqual(1, qos.id)
        self.assertEqual("qos", qos.name)
        self.assertEqual({"key1": "value1"}, qos.specs)

    def test__unify_encryption_type(self):
        class SomeEncryptionType(object):
            encryption_id = 1
            volume_type_id = "volume_type"
        encryption_type = self.service._unify_encryption_type(
            SomeEncryptionType())
        self.assertEqual(1, encryption_type.id)
        self.assertEqual("volume_type", encryption_type.volume_type_id)

    def test_delete_volume(self):
        self.service.delete_volume("volume")
        self.service._impl.delete_volume.assert_called_once_with("volume")

    def test_set_metadata(self):
        self.assertEqual(
            self.service._impl.set_metadata.return_value,
            self.service.set_metadata("volume", sets=10, set_size=3))
        self.service._impl.set_metadata.assert_called_once_with(
            "volume", set_size=3, sets=10)

    def test_delete_metadata(self):
        keys = ["a", "b"]
        self.service.delete_metadata("volume", keys=keys, deletes=10,
                                     delete_size=3)
        self.service._impl.delete_metadata.assert_called_once_with(
            "volume", keys=keys, delete_size=3, deletes=10)

    def test_update_readonly_flag(self):
        self.assertEqual(
            self.service._impl.update_readonly_flag.return_value,
            self.service.update_readonly_flag("volume", read_only=True))
        self.service._impl.update_readonly_flag.assert_called_once_with(
            "volume", read_only=True)

    def test_upload_volume_to_image(self):
        self.assertEqual(
            self.service._impl.upload_volume_to_image.return_value,
            self.service.upload_volume_to_image("volume",
                                                force=False,
                                                container_format="bare",
                                                disk_format="raw"))
        self.service._impl.upload_volume_to_image.assert_called_once_with(
            "volume", container_format="bare", disk_format="raw", force=False)

    def test_create_qos(self):
        specs = {"consumer": "both",
                 "write_iops_sec": "10",
                 "read_iops_sec": "1000"}
        self.service._unify_qos = mock.MagicMock()
        self.assertEqual(
            self.service._unify_qos.return_value,
            self.service.create_qos(specs)
        )
        self.service._impl.create_qos.assert_called_once_with(specs)
        self.service._unify_qos.assert_called_once_with(
            self.service._impl.create_qos.return_value
        )

    def test_list_qos(self):
        self.service._unify_qos = mock.MagicMock()
        self.service._impl.list_qos.return_value = ["qos"]
        self.assertEqual(
            [self.service._unify_qos.return_value],
            self.service.list_qos(True)
        )
        self.service._impl.list_qos.assert_called_once_with(True)
        self.service._unify_qos.assert_called_once_with("qos")

    def test_get_qos(self):
        self.service._unify_qos = mock.MagicMock()
        self.assertEqual(
            self.service._unify_qos.return_value,
            self.service.get_qos("qos"))
        self.service._impl.get_qos.assert_called_once_with("qos")
        self.service._unify_qos.assert_called_once_with(
            self.service._impl.get_qos.return_value
        )

    def test_set_qos(self):
        set_specs_args = {"test": "foo"}
        self.service._unify_qos = mock.MagicMock()
        qos = mock.MagicMock()
        self.assertEqual(
            self.service._unify_qos.return_value,
            self.service.set_qos(qos, set_specs_args))
        self.service._impl.set_qos.assert_called_once_with(qos.id,
                                                           set_specs_args)
        self.service._unify_qos.assert_called_once_with(qos)

    def test_qos_associate_type(self):
        self.service._unify_qos = mock.MagicMock()
        self.assertEqual(
            self.service._unify_qos.return_value,
            self.service.qos_associate_type("qos", "type_id"))
        self.service._impl.qos_associate_type.assert_called_once_with(
            "qos", "type_id")
        self.service._unify_qos.assert_called_once_with("qos")

    def test_qos_disassociate_type(self):
        self.service._unify_qos = mock.MagicMock()
        self.assertEqual(
            self.service._unify_qos.return_value,
            self.service.qos_disassociate_type("qos", "type_id"))
        self.service._impl.qos_disassociate_type.assert_called_once_with(
            "qos", "type_id")
        self.service._unify_qos.assert_called_once_with("qos")

    def test_delete_snapshot(self):
        self.service.delete_snapshot("snapshot")
        self.service._impl.delete_snapshot.assert_called_once_with("snapshot")

    def test_delete_backup(self):
        self.service.delete_backup("backup")
        self.service._impl.delete_backup.assert_called_once_with("backup")

    def test_list_backups(self):
        self.service._unify_backup = mock.MagicMock()
        self.service._impl.list_backups.return_value = ["backup"]
        self.assertEqual([self.service._unify_backup.return_value],
                         self.service.list_backups(detailed=True))
        self.service._impl.list_backups.assert_called_once_with(detailed=True)
        self.service._unify_backup.assert_called_once_with(
            "backup")

    def test_list_transfers(self):
        self.service._unify_transfer = mock.MagicMock()
        self.service._impl.list_transfers.return_value = ["transfer"]
        self.assertEqual(
            [self.service._unify_transfer.return_value],
            self.service.list_transfers(detailed=True, search_opts=None))
        self.service._impl.list_transfers.assert_called_once_with(
            detailed=True, search_opts=None)
        self.service._unify_transfer.assert_called_once_with(
            "transfer")

    def test_get_volume_type(self):
        self.assertEqual(self.service._impl.get_volume_type.return_value,
                         self.service.get_volume_type("volume_type"))
        self.service._impl.get_volume_type.assert_called_once_with(
            "volume_type")

    def test_delete_volume_type(self):
        self.assertEqual(self.service._impl.delete_volume_type.return_value,
                         self.service.delete_volume_type("volume_type"))
        self.service._impl.delete_volume_type.assert_called_once_with(
            "volume_type")

    def test_set_volume_type_keys(self):
        self.assertEqual(self.service._impl.set_volume_type_keys.return_value,
                         self.service.set_volume_type_keys(
                             "volume_type", metadata="metadata"))
        self.service._impl.set_volume_type_keys.assert_called_once_with(
            "volume_type", "metadata")

    def test_transfer_create(self):
        self.service._unify_transfer = mock.MagicMock()
        self.assertEqual(self.service._unify_transfer.return_value,
                         self.service.transfer_create(1))
        self.service._impl.transfer_create.assert_called_once_with(
            1, name=None)
        self.service._unify_transfer.assert_called_once_with(
            self.service._impl.transfer_create.return_value)

    def test_transfer_accept(self):
        self.service._unify_transfer = mock.MagicMock()
        self.assertEqual(self.service._unify_transfer.return_value,
                         self.service.transfer_accept(1, auth_key=2))
        self.service._impl.transfer_accept.assert_called_once_with(
            1, auth_key=2)
        self.service._unify_transfer.assert_called_once_with(
            self.service._impl.transfer_accept.return_value)

    def test_create_encryption_type(self):
        self.service._unify_encryption_type = mock.MagicMock()
        self.assertEqual(
            self.service._unify_encryption_type.return_value,
            self.service.create_encryption_type("type", specs=2))
        self.service._impl.create_encryption_type.assert_called_once_with(
            "type", specs=2)
        self.service._unify_encryption_type.assert_called_once_with(
            self.service._impl.create_encryption_type.return_value)

    def test_get_encryption_type(self):
        self.service._unify_encryption_type = mock.MagicMock()
        self.assertEqual(
            self.service._unify_encryption_type.return_value,
            self.service.get_encryption_type("type"))
        self.service._impl.get_encryption_type.assert_called_once_with(
            "type")
        self.service._unify_encryption_type.assert_called_once_with(
            self.service._impl.get_encryption_type.return_value)

    def test_list_encryption_type(self):
        self.service._unify_encryption_type = mock.MagicMock()
        self.service._impl.list_encryption_type.return_value = ["encryption"]
        self.assertEqual([self.service._unify_encryption_type.return_value],
                         self.service.list_encryption_type(search_opts=None))
        self.service._impl.list_encryption_type.assert_called_once_with(
            search_opts=None)
        self.service._unify_encryption_type.assert_called_once_with(
            "encryption")

    def test_delete_encryption_type(self):
        self.service.delete_encryption_type("type")
        self.service._impl.delete_encryption_type.assert_called_once_with(
            "type")

    def test_update_encryption_type(self):
        self.service.update_encryption_type("type", specs=3)
        self.service._impl.update_encryption_type.assert_called_once_with(
            "type", specs=3)
| {'content_hash': 'ef40d9668af13dcf8801727c6d521677', 'timestamp': '', 'source': 'github', 'line_count': 691, 'max_line_length': 79, 'avg_line_length': 41.465991316931984, 'alnum_prop': 0.6129550134366384, 'repo_name': 'yeming233/rally', 'id': 'a66751d37efa3d6399d7065dc89ea1fb7169e8ad', 'size': '29251', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'tests/unit/plugins/openstack/services/storage/test_cinder_common.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Mako', 'bytes': '46940'}, {'name': 'Python', 'bytes': '2561223'}, {'name': 'Shell', 'bytes': '43366'}]} |
---
layout: page
title: "About"
description: "Hey, this is YuHuo."
header-img: "img/about-bg.jpg"
---
<!-- Language Selector -->
<select onchange= "onLanChange(this.options[this.options.selectedIndex].value)">
<option value="0" selected> 中文 Chinese </option>
<option value="1"> 英文 English </option>
</select>
<!-- Chinese Version -->
<div class="zh post-container">
<!--copied from markdown -->
<blockquote><p>世界那么大,我想去看看。</p></blockquote>
<!--
.......
-->
<h5>Talks</h5>
<h5>Resumes</h5>
</div>
<!-- English Version -->
<div class="en post-container">
<blockquote><p>Yet another Frontend Engineer. <br>
Yet another Lifelong Designer.</p></blockquote>
<!--
........
-->
<h5>Talks</h5>
<h5>Resumes</h5>
</div>
<!-- Handle Language Change -->
<script type="text/javascript">
// get nodes
var $zh = document.querySelector(".zh");
var $en = document.querySelector(".en");
var $select = document.querySelector("select");
// bind hashchange event
window.addEventListener('hashchange', _render);
// handle render
function _render(){
var _hash = window.location.hash;
// en
if(_hash == "#en"){
$select.selectedIndex = 1;
$en.style.display = "block";
$zh.style.display = "none";
// zh by default
}else{
// not trigger onChange, otherwise cause a loop call.
$select.selectedIndex = 0;
$zh.style.display = "block";
$en.style.display = "none";
}
}
// handle select change
function onLanChange(index){
if(index == 0){
window.location.hash = "#zh"
}else{
window.location.hash = "#en"
}
}
// init
_render();
</script>
{% if site.duoshuo_username %}
<!-- Duoshuo comment box: start -->
<div class="comment">
    <div class="ds-thread"
    {% if site.duoshuo_username == "YuHuo" %}
         data-thread-id="{{page.id}}"
    {% else %}
         data-thread-key="{{site.duoshuo_username}}/about"
    {% endif %}
         data-title="{{page.title}}"
         data-url="{{site.url}}/about/"></div>
</div>
<!-- Duoshuo comment box: end -->
<!-- Duoshuo shared embed JS: start (insert only once per page) -->
<script type="text/javascript">
    // Pick the Duoshuo account dynamically from the site config.
    var _user = '{{site.duoshuo_username}}';
    // Duoshuo embed reads this global to locate the comment thread.
    var duoshuoQuery = {short_name: _user };
    (function() {
        // Inject Duoshuo's embed script asynchronously, matching the page protocol.
        var ds = document.createElement('script');
        ds.type = 'text/javascript';ds.async = true;
        ds.src = (document.location.protocol == 'https:' ? 'https:' : 'http:') + '//static.duoshuo.com/embed.js';
        ds.charset = 'UTF-8';
        (document.getElementsByTagName('head')[0]
         || document.getElementsByTagName('body')[0]).appendChild(ds);
    })();
</script>
<!-- Duoshuo shared embed JS: end -->
{% endif %}
{% if site.disqus_username %}
<!-- Disqus comment box: start -->
<div class="comment">
    <!-- Disqus injects its comment UI into this node. -->
    <div id="disqus_thread" class="disqus-thread">
    </div>
</div>
<!-- Disqus comment box: end -->
<!-- Disqus shared embed JS: start (insert only once per page) -->
<script type="text/javascript">
    /* * * CONFIGURATION VARIABLES * * */
    var disqus_shortname = "{{site.disqus_username}}";
    // Identifier and URL tie this page's comment thread to its permalink.
    var disqus_identifier = "{{site.disqus_username}}/{{page.url}}";
    var disqus_url = "{{site.url}}{{page.url}}";
    (function() {
        // Load Disqus' embed script asynchronously.
        var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true;
        dsq.src = '//' + disqus_shortname + '.disqus.com/embed.js';
        (document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
    })();
</script>
<!-- Disqus shared embed JS: end -->
{% endif %}
| {'content_hash': '0121854898ee9db997a780e36bbf659c', 'timestamp': '', 'source': 'github', 'line_count': 153, 'max_line_length': 113, 'avg_line_length': 24.0718954248366, 'alnum_prop': 0.5701873472712463, 'repo_name': 'desireall/desireall.github.io', 'id': 'fa5e909d4a88a82c064685fd3cff92cf9a74dd8d', 'size': '3827', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'about.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '51349'}, {'name': 'HTML', 'bytes': '50103'}, {'name': 'JavaScript', 'bytes': '12381'}]} |
package com.telerikacademy.meetup.view.home;
import android.util.Log;
import com.telerikacademy.meetup.model.base.ILocation;
import com.telerikacademy.meetup.provider.base.ILocationProvider;
import com.telerikacademy.meetup.view.home.base.IHomeHeaderContract;
import javax.inject.Inject;
public class HomeHeaderPresenter implements IHomeHeaderContract.Presenter {

    private static final String TAG = HomeHeaderPresenter.class.getSimpleName();
    private static final String UNKNOWN_LOCATION_TITLE = "Unknown location";

    private final ILocationProvider locationProvider;
    private IHomeHeaderContract.View view;
    private ILocation currentLocation;

    @Inject
    public HomeHeaderPresenter(ILocationProvider locationProvider) {
        this.locationProvider = locationProvider;
        setupLocationListeners();
    }

    @Override
    public void setView(IHomeHeaderContract.View view) {
        this.view = view;
    }

    @Override
    public void subscribe() {
        locationProvider.connect();
    }

    @Override
    public void unsubscribe() {
        locationProvider.disconnect();
    }

    @Override
    public void update() {
        view.requestPermissions();
        view.showEnableLocationDialog();

        // Reconnect only when permissions are granted and no connection
        // attempt is already underway.
        boolean shouldReconnect = view.checkPermissions()
                && !locationProvider.isConnected()
                && !locationProvider.isConnecting();
        if (shouldReconnect) {
            locationProvider.connect();
        }

        setTitle(currentLocation);
    }

    @Override
    public ILocation getLocation() {
        return currentLocation;
    }

    private void setupLocationListeners() {
        locationProvider.setOnLocationChangeListener(new ILocationProvider.IOnLocationChangeListener() {
            @Override
            public void onLocationChange(ILocation newLocation) {
                // Only the very first fix refreshes the header here; later
                // updates are rendered on the next update() call.
                if (currentLocation == null) {
                    setTitle(newLocation);
                }
                currentLocation = newLocation;
            }
        });

        locationProvider.setOnConnectedListener(new ILocationProvider.IOnConnectedListener() {
            @Override
            public void onConnected(ILocation initialLocation) {
                currentLocation = initialLocation;
                setTitle(initialLocation);
            }
        });

        locationProvider.setOnConnectionFailedListener(new ILocationProvider.IOnConnectionFailedListener() {
            @Override
            public void onConnectionFailed(String errorMessage) {
                Log.e(TAG, errorMessage);
                // Fall back to the "unknown location" header.
                setTitle(null);
            }
        });
    }

    /**
     * Renders the header title/subtitle for the given location:
     * locality as title with "street, number" subtitle when available,
     * street as title otherwise, or an "unknown" placeholder.
     */
    private void setTitle(ILocation location) {
        if (location == null) {
            view.setTitle(UNKNOWN_LOCATION_TITLE);
            view.setSubtitle("");
            return;
        }

        String locality = nullToEmpty(location.getLocality());
        String street = nullToEmpty(location.getThoroughfare());
        String streetNumber = nullToEmpty(location.getSubThoroughfare());

        if (locality.isEmpty() && street.isEmpty()) {
            view.setTitle(UNKNOWN_LOCATION_TITLE);
            view.setSubtitle("");
        } else if (locality.isEmpty()) {
            view.setTitle(street);
            view.setSubtitle(streetNumber);
        } else {
            view.setTitle(locality);
            view.setSubtitle(composeStreetLine(street, streetNumber));
        }
    }

    // Joins street and number as "street, number", dropping whichever is empty.
    private static String composeStreetLine(String street, String streetNumber) {
        if (street.isEmpty()) {
            return streetNumber;
        }
        if (streetNumber.isEmpty()) {
            return street;
        }
        return String.format("%s, %s", street, streetNumber);
    }

    private static String nullToEmpty(String value) {
        return value == null ? "" : value;
    }
}
| {'content_hash': '3a49a3c164fab09b7f758b2ce9d1b6ad', 'timestamp': '', 'source': 'github', 'line_count': 125, 'max_line_length': 108, 'avg_line_length': 30.616, 'alnum_prop': 0.6096158871178469, 'repo_name': 'MathRandomNext/Android', 'id': 'e23e0227e5c6676fca03e407a48444b1f3b3f3a7', 'size': '3827', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Meetup/app/src/main/java/com/telerikacademy/meetup/view/home/HomeHeaderPresenter.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '245124'}]} |
package org.apache.asterix.lang.sqlpp.util;
import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.lang.common.base.ILangExpression;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.sqlpp.visitor.SqlppFormatPrintVisitor;
public class SqlppFormatPrintUtil {

    /**
     * Prints the formatted output of an ILangExpression.
     *
     * @param expr
     *            the language expression.
     * @param output
     *            a writer for printing strings.
     * @throws AsterixException
     */
    public static void print(ILangExpression expr, PrintWriter output) throws AsterixException {
        SqlppFormatPrintVisitor visitor = new SqlppFormatPrintVisitor(output);
        expr.accept(visitor, 0);
    }

    /**
     * Prints the formatted output of a list of top-level language statements.
     *
     * @param statements
     *            a list of statements of a query
     * @param output
     *            a writer for printing strings.
     * @throws AsterixException
     */
    public static void print(List<Statement> statements, PrintWriter output) throws AsterixException {
        SqlppFormatPrintVisitor visitor = new SqlppFormatPrintVisitor(output);
        for (Statement statement : statements) {
            statement.accept(visitor, 0);
        }
    }

    /**
     * @param expr
     *            a language expression.
     * @return a formatted string of a language expression.
     * @throws AsterixException
     */
    public static String toString(ILangExpression expr) throws AsterixException {
        List<ILangExpression> exprs = new ArrayList<ILangExpression>();
        exprs.add(expr);
        return toString(exprs);
    }

    /**
     * @param exprs
     *            a list of language expression.
     * @return a formatted string of the input language expressions.
     * @throws AsterixException
     */
    public static String toString(List<ILangExpression> exprs) throws AsterixException {
        // Use a StringWriter instead of ByteArrayOutputStream + new String(...):
        // the previous byte round-trip depended on the platform default charset,
        // so formatted output could differ (or be corrupted) across JVM locales.
        StringWriter stringWriter = new StringWriter();
        PrintWriter output = new PrintWriter(stringWriter);
        SqlppFormatPrintVisitor visitor = new SqlppFormatPrintVisitor(output);
        for (ILangExpression expr : exprs) {
            expr.accept(visitor, 0);
        }
        output.flush();
        return stringWriter.toString();
    }
}
| {'content_hash': 'f648c705ba9d7c2a2c6be33f233265c5', 'timestamp': '', 'source': 'github', 'line_count': 75, 'max_line_length': 102, 'avg_line_length': 33.42666666666667, 'alnum_prop': 0.6669325887514959, 'repo_name': 'amoudi87/asterixdb', 'id': 'f10c9fc319453b04b82be5eafcaf34086f88959b', 'size': '3314', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppFormatPrintUtil.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '6115'}, {'name': 'CSS', 'bytes': '4763'}, {'name': 'Crystal', 'bytes': '453'}, {'name': 'HTML', 'bytes': '114488'}, {'name': 'Java', 'bytes': '9375622'}, {'name': 'JavaScript', 'bytes': '237719'}, {'name': 'Python', 'bytes': '268336'}, {'name': 'Ruby', 'bytes': '2666'}, {'name': 'Scheme', 'bytes': '1105'}, {'name': 'Shell', 'bytes': '182529'}, {'name': 'Smarty', 'bytes': '31412'}]} |
package hakisute.entity;
import hakisute.entity.EntryTagRelationNames._EntryTagRelationNames;
import hakisute.entity.UserNames._UserNames;
import java.sql.Timestamp;
import javax.annotation.Generated;
import org.seasar.extension.jdbc.name.PropertyName;
/**
* {@link Entry}のプロパティ名の集合です。
*
*/
@Generated(value = {"S2JDBC-Gen 2.4.46", "org.seasar.extension.jdbc.gen.internal.model.NamesModelFactoryImpl"}, date = "2015/08/14 1:58:49")
public class EntryNames {
    /**
     * Returns the property name of entryId.
     *
     * @return the property name of entryId
     */
    public static PropertyName<Integer> entryId() {
        return new PropertyName<Integer>("entryId");
    }
    /**
     * Returns the property name of entryBody.
     *
     * @return the property name of entryBody
     */
    public static PropertyName<String> entryBody() {
        return new PropertyName<String>("entryBody");
    }
    /**
     * Returns the property name of insertDate.
     *
     * @return the property name of insertDate
     */
    public static PropertyName<Timestamp> insertDate() {
        return new PropertyName<Timestamp>("insertDate");
    }
    /**
     * Returns the property name of updateDate.
     *
     * @return the property name of updateDate
     */
    public static PropertyName<Timestamp> updateDate() {
        return new PropertyName<Timestamp>("updateDate");
    }
    /**
     * Returns the property name of deleteFlg.
     *
     * @return the property name of deleteFlg
     */
    public static PropertyName<Boolean> deleteFlg() {
        return new PropertyName<Boolean>("deleteFlg");
    }
    /**
     * Returns the property name of userId.
     *
     * @return the property name of userId
     */
    public static PropertyName<Integer> userId() {
        return new PropertyName<Integer>("userId");
    }
    /**
     * Returns the property name of user.
     *
     * @return the property name of user
     */
    public static _UserNames user() {
        return new _UserNames("user");
    }
    /**
     * Returns the property name of entryTagRelationList.
     *
     * @return the property name of entryTagRelationList
     */
    public static _EntryTagRelationNames entryTagRelationList() {
        return new _EntryTagRelationNames("entryTagRelationList");
    }
    /**
     * Nested property-name holder so that Entry properties can be referenced
     * from a parent property path.
     *
     * @author S2JDBC-Gen
     */
    public static class _EntryNames extends PropertyName<Entry> {
        /**
         * Constructs an instance.
         */
        public _EntryNames() {
        }
        /**
         * Constructs an instance.
         *
         * @param name
         *            the property name
         */
        public _EntryNames(final String name) {
            super(name);
        }
        /**
         * Constructs an instance.
         *
         * @param parent
         *            the parent property
         * @param name
         *            the property name
         */
        public _EntryNames(final PropertyName<?> parent, final String name) {
            super(parent, name);
        }
        /**
         * Returns the property name of entryId.
         *
         * @return the property name of entryId
         */
        public PropertyName<Integer> entryId() {
            return new PropertyName<Integer>(this, "entryId");
        }
        /**
         * Returns the property name of entryBody.
         *
         * @return the property name of entryBody
         */
        public PropertyName<String> entryBody() {
            return new PropertyName<String>(this, "entryBody");
        }
        /**
         * Returns the property name of insertDate.
         *
         * @return the property name of insertDate
         */
        public PropertyName<Timestamp> insertDate() {
            return new PropertyName<Timestamp>(this, "insertDate");
        }
        /**
         * Returns the property name of updateDate.
         *
         * @return the property name of updateDate
         */
        public PropertyName<Timestamp> updateDate() {
            return new PropertyName<Timestamp>(this, "updateDate");
        }
        /**
         * Returns the property name of deleteFlg.
         *
         * @return the property name of deleteFlg
         */
        public PropertyName<Boolean> deleteFlg() {
            return new PropertyName<Boolean>(this, "deleteFlg");
        }
        /**
         * Returns the property name of userId.
         *
         * @return the property name of userId
         */
        public PropertyName<Integer> userId() {
            return new PropertyName<Integer>(this, "userId");
        }
        /**
         * Returns the property name of user.
         *
         * @return the property name of user
         */
        public _UserNames user() {
            return new _UserNames(this, "user");
        }
        /**
         * Returns the property name of entryTagRelationList.
         *
         * @return the property name of entryTagRelationList
         */
        public _EntryTagRelationNames entryTagRelationList() {
            return new _EntryTagRelationNames(this, "entryTagRelationList");
        }
    }
}
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Provide atomic CRUD database operations.
*
* The actual operation is implemented in transaction queue. This class create
* a new transaction queue as necessary.
*
* @author [email protected] (Kyaw Tun)
*/
goog.provide('ydn.db.crud.Storage');
goog.require('ydn.db.crud.DbOperator');
goog.require('ydn.db.crud.IOperator');
goog.require('ydn.db.crud.req.IRequestExecutor');
goog.require('ydn.db.events.RecordEvent');
goog.require('ydn.db.events.StoreEvent');
goog.require('ydn.db.tr.Storage');
goog.require('ydn.object');
/**
 * Construct storage providing atomic CRUD database operations on implemented
 * storage mechanisms.
 *
 * This class do not execute database operation, but create a non-overlapping
 * transaction queue on ydn.db.crud.DbOperator and all operations are
 * passed to it.
 *
 *
 * @param {string=} opt_dbname database name.
 * @param {(ydn.db.schema.Database|!DatabaseSchema)=} opt_schema database
 * schema
 * or its configuration in JSON format. If not provided, default empty schema
 * is used.
 * @param {!StorageOptions=} opt_options options.
 * @extends {ydn.db.tr.Storage}
 * @implements {ydn.db.crud.IOperator}
 * @constructor
 */
ydn.db.crud.Storage = function(opt_dbname, opt_schema, opt_options) {
  goog.base(this, opt_dbname, opt_schema, opt_options);

  var schema = this.schema;
  // Validate every declared full-text index: its backing store must exist,
  // expose the 'k' (keyword) and 'v' (value) indexes, and key on 'id'.
  for (var i = 0; i < schema.countFullTextIndex(); i++) {
    var ft_schema = schema.fullTextIndex(i);
    var store = schema.getStore(ft_schema.getName());
    if (store) {
      if (!store.hasIndex('k')) {
        throw new ydn.debug.error.ArgumentException('full text index store "' +
            store.getName() + '" must have "keyword" index');
      }
      if (!store.hasIndex('v')) {
        // NOTE: this message previously duplicated the "keyword" wording of
        // the 'k' check above; it now names the missing 'v' (value) index.
        throw new ydn.debug.error.ArgumentException('full text index store "' +
            store.getName() + '" must have "value" index');
      }
      if (store.getKeyPath() != 'id') {
        throw new ydn.debug.error.ArgumentException('full text index store "' +
            store.getName() + '" must use "id" as key path.');
      }
    } else {
      throw new ydn.debug.error.ArgumentException('full text index store "' +
          ft_schema.getName() + '" required.');
    }
    // Each indexed source store must also be present in the schema.
    for (var j = 0; j < ft_schema.count(); j++) {
      var index = ft_schema.index(j);
      var source_store = schema.getStore(index.getStoreName());
      if (source_store) {
        this.addFullTextIndexer(source_store, ft_schema);
      } else {
        throw new ydn.debug.error.ArgumentException('full text source store "' +
            index.getStoreName() + '" does not exist for full text index "' +
            ft_schema.getName() + '"');
      }
    }
  }
};
goog.inherits(ydn.db.crud.Storage, ydn.db.tr.Storage);
/**
 * Initializes the storage instance by deferring to the superclass
 * {@link ydn.db.tr.Storage#init}. Kept as an explicit override so this
 * class has a single, visible initialization hook.
 * @override
 */
ydn.db.crud.Storage.prototype.init = function() {
  goog.base(this, 'init');
};
/**
 * Creates a CRUD database operator bound to the given transaction threads.
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.newOperator = function(tx_thread, sync_thread) {
  var operator = new ydn.db.crud.DbOperator(
      this, this.schema, tx_thread, sync_thread);
  return operator;
};
/**
 * Returns the active database operator cast to its concrete CRUD type.
 * @return {ydn.db.crud.DbOperator}
 */
ydn.db.crud.Storage.prototype.getCoreOperator = function() {
  var op = /** @type {ydn.db.crud.DbOperator} */ (this.db_operator);
  return op;
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.add = function(store, value, opt_key) {
  // Thin delegation: the CRUD operator performs the actual add.
  var op = this.getCoreOperator();
  return op.add(store, value, opt_key);
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.count = function(store_name, key_range, index,
    unique) {
  // Thin delegation: the CRUD operator performs the actual count.
  var op = this.getCoreOperator();
  return op.count(store_name, key_range, index, unique);
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.get = function(arg1, arg2) {
  // Thin delegation: the CRUD operator performs the actual get.
  var op = this.getCoreOperator();
  return op.get(arg1, arg2);
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.keys = function(store_name, arg2, arg3, arg4,
    arg5, arg6, arg7) {
  // Forward the exact argument list so downstream optional-argument
  // handling (which inspects each positional slot) behaves identically.
  var op = this.getCoreOperator();
  return op.keys(store_name, arg2, arg3, arg4, arg5, arg6, arg7);
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.values = function(arg1, arg2, arg3, arg4, arg5,
    arg6) {
  // Thin delegation: the CRUD operator performs the actual values query.
  var op = this.getCoreOperator();
  return op.values(arg1, arg2, arg3, arg4, arg5, arg6);
};
/**
 * List records or keys that match the given query parameters.
 * @param {ydn.db.base.QueryMethod} type
 * @param {string} store_name
 * @param {string=} index_name
 * @param {ydn.db.KeyRange|ydn.db.IDBKeyRange=} key_range
 * @param {number=} limit
 * @param {number=} offset
 * @param {boolean=} reverse
 * @param {boolean=} unique
 * @param {Array.<IDBKey|undefined>=} opt_pos last cursor position.
 * @return {!ydn.db.Request}
 */
ydn.db.crud.Storage.prototype.list = function(type, store_name, index_name,
    key_range, limit, offset, reverse, unique, opt_pos) {
  var op = this.getCoreOperator();
  return op.list(type, store_name, index_name, key_range, limit, offset,
      reverse, unique, opt_pos);
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.load = function(store_name_or_schema, data,
    delimiter) {
  // Thin delegation: the CRUD operator performs the actual bulk load.
  var op = this.getCoreOperator();
  return op.load(store_name_or_schema, data, delimiter);
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.put = function(store, value, opt_key) {
  // Thin delegation: the CRUD operator performs the actual put.
  var op = this.getCoreOperator();
  return op.put(store, value, opt_key);
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.clear = function(arg1, arg2, arg3) {
  // Thin delegation: the CRUD operator performs the actual clear.
  var op = this.getCoreOperator();
  return op.clear(arg1, arg2, arg3);
};
/**
 * @inheritDoc
 */
ydn.db.crud.Storage.prototype.remove = function(arg1, arg2, arg3) {
  // Thin delegation: the CRUD operator performs the actual remove.
  var op = this.getCoreOperator();
  return op.remove(arg1, arg2, arg3);
};
if (goog.DEBUG) {
  /** @override */
  ydn.db.crud.Storage.prototype.toString = function() {
    // Debug-only representation: "Storage:<name>", plus the mechanism
    // type in brackets once the database connection is ready.
    var text = 'Storage:' + this.getName();
    if (!this.isReady()) {
      return text;
    }
    return text + ' [' + this.getType() + ']';
  };
}
| {'content_hash': '2c9738096b48d231474c79a03df671ce', 'timestamp': '', 'source': 'github', 'line_count': 242, 'max_line_length': 80, 'avg_line_length': 27.983471074380166, 'alnum_prop': 0.6450088600118133, 'repo_name': 'jayteemi/educatoursja', 'id': 'b1230c8afa443905685d342599800714a2c71f19', 'size': '6772', 'binary': False, 'copies': '7', 'ref': 'refs/heads/master', 'path': 'www2/lib/ydn.db/src/ydn/db/crud/storage.js', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C++', 'bytes': '94'}, {'name': 'CSS', 'bytes': '1273586'}, {'name': 'DIGITAL Command Language', 'bytes': '97'}, {'name': 'HTML', 'bytes': '284515'}, {'name': 'JavaScript', 'bytes': '6895217'}, {'name': 'Shell', 'bytes': '461'}, {'name': 'TypeScript', 'bytes': '8439'}]} |
package org.optaplanner.core.impl.exhaustivesearch.node.comparator;
import static org.assertj.core.api.SoftAssertions.assertSoftly;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Comparator;
import org.optaplanner.core.api.score.buildin.simple.SimpleScore;
import org.optaplanner.core.impl.exhaustivesearch.node.ExhaustiveSearchNode;
public abstract class AbstractNodeComparatorTest {

    /**
     * Builds a mock node whose optimistic bound is the parsed score with its
     * init score reset to zero.
     */
    protected ExhaustiveSearchNode buildNode(int depth, String score, long parentBreadth, long breadth) {
        SimpleScore parsedScore = SimpleScore.parseScore(score);
        return buildNode(depth, parsedScore, parsedScore.withInitScore(0),
                parentBreadth, breadth);
    }

    /**
     * Builds a mock node with an explicit integer optimistic bound.
     */
    protected ExhaustiveSearchNode buildNode(int depth, String score, int optimisticBound,
            long parentBreadth, long breadth) {
        return buildNode(depth,
                SimpleScore.parseScore(score),
                SimpleScore.of(optimisticBound),
                parentBreadth, breadth);
    }

    /**
     * Builds a fully stubbed mock {@link ExhaustiveSearchNode}; its
     * {@code toString()} mirrors the score for readable assertion messages.
     */
    protected ExhaustiveSearchNode buildNode(int depth, SimpleScore score, SimpleScore optimisticBound,
            long parentBreadth, long breadth) {
        ExhaustiveSearchNode mockedNode = mock(ExhaustiveSearchNode.class);
        when(mockedNode.toString()).thenReturn(score.toString());
        when(mockedNode.getDepth()).thenReturn(depth);
        when(mockedNode.getScore()).thenReturn(score);
        when(mockedNode.getOptimisticBound()).thenReturn(optimisticBound);
        when(mockedNode.getParentBreadth()).thenReturn(parentBreadth);
        when(mockedNode.getBreadth()).thenReturn(breadth);
        return mockedNode;
    }

    /**
     * Asserts that the comparator orders {@code a} strictly before {@code b}
     * in both comparison directions.
     */
    protected static void assertLesser(Comparator<ExhaustiveSearchNode> comparator,
            ExhaustiveSearchNode a, ExhaustiveSearchNode b) {
        assertSoftly(checks -> {
            checks.assertThat(comparator.compare(a, b))
                    .as("Node (" + a + ") must be lesser than node (" + b + ").")
                    .isLessThan(0);
            checks.assertThat(comparator.compare(b, a))
                    .as("Node (" + b + ") must be greater than node (" + a + ").")
                    .isGreaterThan(0);
        });
    }

    /**
     * Asserts that every node is ordered strictly before every node that
     * follows it in the given sequence (all pairwise combinations).
     */
    protected static void assertScoreCompareToOrder(Comparator<ExhaustiveSearchNode> comparator,
            ExhaustiveSearchNode... nodes) {
        for (int first = 0; first < nodes.length; first++) {
            for (int second = first + 1; second < nodes.length; second++) {
                assertLesser(comparator, nodes[first], nodes[second]);
            }
        }
    }
}
| {'content_hash': '332d21aa81dba3da3d48dbe1e12c8879', 'timestamp': '', 'source': 'github', 'line_count': 64, 'max_line_length': 105, 'avg_line_length': 39.71875, 'alnum_prop': 0.6463414634146342, 'repo_name': 'droolsjbpm/optaplanner', 'id': '78ac88d4d3adb0a856a216473adf006ae91d6efd', 'size': '3162', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'optaplanner-core/src/test/java/org/optaplanner/core/impl/exhaustivesearch/node/comparator/AbstractNodeComparatorTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '2602'}, {'name': 'CSS', 'bytes': '13781'}, {'name': 'FreeMarker', 'bytes': '114386'}, {'name': 'HTML', 'bytes': '678'}, {'name': 'Java', 'bytes': '6988206'}, {'name': 'JavaScript', 'bytes': '215434'}, {'name': 'Shell', 'bytes': '1548'}]} |
'use strict';
var mongoose = require('mongoose');
var ObjectId = mongoose.Types.ObjectId;
var _ = require('lodash');
var debug = require('debug')('formio:middleware:bootstrapFormAccess');
/**
* Middleware to bootstrap the access of forms.
*
* When a new form is made, iterate all the roles to add to the form.
*
* @param req
* @param res
* @param next
* @returns {*}
*/
module.exports = function(router) {
var hook = require('../util/hook')(router.formio);
return function bootstrapFormAccess(req, res, next) {
// Only bootstrap access on Form creation.
if (req.method !== 'POST' || !res || !res.hasOwnProperty('resource') || !res.resource.item) {
debug('Skipping');
return next();
}
// Query the roles collection, to build the updated form access list.
router.formio.resources.role.model
.find(hook.alter('roleQuery', {deleted: {$eq: null}}, req))
.exec(function(err, roles) {
if (err) {
debug(err);
return next(err);
}
if (!roles || roles.length === 0) {
debug('No roles found');
return next();
}
// Convert the roles to ObjectIds before saving.
debug(roles);
roles = _.map(roles, function(role) {
return ObjectId(role.toObject()._id);
});
var update = [{type: 'read_all', roles: roles}];
debug(update);
router.formio.resources.form.model.findOne({_id: res.resource.item._id, deleted: {$eq: null}})
.exec(function(err, form) {
if (err) {
debug(err);
return next(err);
}
if (!form) {
debug('No form found with _id: ' + res.resource.item._id);
return next();
}
// Update the actual form in mongo to reflect the access changes.
form.access = update;
form.save(function(err, form) {
if (err) {
debug(err);
return next(err);
}
// Update the response to reflect the access changes.
// Filter the response to have no __v and deleted key.
var ret = _.omit(_.omit(form.toObject(), 'deleted'), '__v');
res.resource.item = ret;
next();
});
});
});
};
};
| {'content_hash': 'e7a1c21ef6c849386f496e3c06708b5d', 'timestamp': '', 'source': 'github', 'line_count': 77, 'max_line_length': 102, 'avg_line_length': 30.623376623376622, 'alnum_prop': 0.5313825275657337, 'repo_name': 'richgo/formio', 'id': 'a8be0ee310eee058ff2e7f078307d406a6a54047', 'size': '2358', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'src/middleware/bootstrapFormAccess.js', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'HTML', 'bytes': '2670'}, {'name': 'JavaScript', 'bytes': '1597174'}]} |
package kieker.common.record.flow.trace.concurrency.monitor;
import java.nio.BufferOverflowException;
import kieker.common.record.flow.trace.concurrency.monitor.AbstractMonitorEvent;
import kieker.common.record.io.IValueDeserializer;
import kieker.common.record.io.IValueSerializer;
import kieker.common.util.registry.IRegistry;
/**
* @author Jan Waller
* API compatibility: Kieker 1.13.0
*
* @since 1.8
*/
public class MonitorWaitEvent extends AbstractMonitorEvent {
	private static final long serialVersionUID = -8450610362516231950L;

	/** Descriptive definition of the serialization size of the record. */
	public static final int SIZE = TYPE_SIZE_LONG // IEventRecord.timestamp
			 + TYPE_SIZE_LONG // ITraceRecord.traceId
			 + TYPE_SIZE_INT // ITraceRecord.orderIndex
			 + TYPE_SIZE_INT // AbstractMonitorEvent.lockId
	;

	public static final Class<?>[] TYPES = {
		long.class, // IEventRecord.timestamp
		long.class, // ITraceRecord.traceId
		int.class, // ITraceRecord.orderIndex
		int.class, // AbstractMonitorEvent.lockId
	};

	/** property name array. */
	private static final String[] PROPERTY_NAMES = {
		"timestamp",
		"traceId",
		"orderIndex",
		"lockId",
	};

	/**
	 * Creates a new instance of this class using the given parameters.
	 *
	 * @param timestamp
	 *            timestamp
	 * @param traceId
	 *            traceId
	 * @param orderIndex
	 *            orderIndex
	 * @param lockId
	 *            lockId
	 */
	public MonitorWaitEvent(final long timestamp, final long traceId, final int orderIndex, final int lockId) {
		super(timestamp, traceId, orderIndex, lockId);
	}

	/**
	 * This constructor converts the given array into a record.
	 * It is recommended to use the array which is the result of a call to {@link #toArray()}.
	 *
	 * @param values
	 *            The values for the record.
	 *
	 * @deprecated since 1.13. Use {@link #MonitorWaitEvent(IValueDeserializer)} instead.
	 */
	@Deprecated
	public MonitorWaitEvent(final Object[] values) { // NOPMD (direct store of values)
		super(values, TYPES);
	}

	/**
	 * This constructor uses the given array to initialize the fields of this record.
	 *
	 * @param values
	 *            The values for the record.
	 * @param valueTypes
	 *            The types of the elements in the first array.
	 *
	 * @deprecated since 1.13. Use {@link #MonitorWaitEvent(IValueDeserializer)} instead.
	 */
	@Deprecated
	protected MonitorWaitEvent(final Object[] values, final Class<?>[] valueTypes) { // NOPMD (values stored directly)
		super(values, valueTypes);
	}

	/**
	 * @param deserializer
	 *            The deserializer to use
	 */
	public MonitorWaitEvent(final IValueDeserializer deserializer) {
		super(deserializer);
	}

	/**
	 * {@inheritDoc}
	 *
	 * @deprecated since 1.13. Use {@link #serialize(IValueSerializer)} with an array serializer instead.
	 */
	@Override
	@Deprecated
	public Object[] toArray() {
		return new Object[] {
			this.getTimestamp(),
			this.getTraceId(),
			this.getOrderIndex(),
			this.getLockId()
		};
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void registerStrings(final IRegistry<String> stringRegistry) { // NOPMD (generated code)
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void serialize(final IValueSerializer serializer) throws BufferOverflowException {
		//super.serialize(serializer);
		serializer.putLong(this.getTimestamp());
		serializer.putLong(this.getTraceId());
		serializer.putInt(this.getOrderIndex());
		serializer.putInt(this.getLockId());
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public Class<?>[] getValueTypes() {
		return TYPES; // NOPMD
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public String[] getValueNames() {
		return PROPERTY_NAMES; // NOPMD
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public int getSize() {
		return SIZE;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @deprecated This record uses the {@link kieker.common.record.IMonitoringRecord.Factory} mechanism. Hence, this method is not implemented.
	 */
	@Override
	@Deprecated
	public void initFromArray(final Object[] values) {
		throw new UnsupportedOperationException();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public boolean equals(final Object obj) {
		if (obj == null) return false;
		if (obj == this) return true;
		if (obj.getClass() != this.getClass()) return false;

		final MonitorWaitEvent castedRecord = (MonitorWaitEvent) obj;
		if (this.getLoggingTimestamp() != castedRecord.getLoggingTimestamp()) return false;
		if (this.getTimestamp() != castedRecord.getTimestamp()) return false;
		if (this.getTraceId() != castedRecord.getTraceId()) return false;
		if (this.getOrderIndex() != castedRecord.getOrderIndex()) return false;
		if (this.getLockId() != castedRecord.getLockId()) return false;
		return true;
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Added to honor the {@link Object#equals(Object)}/{@link Object#hashCode()}
	 * contract, which the generated {@link #equals(Object)} above previously
	 * violated (records compared equal but could hash differently, breaking
	 * hash-based collections). The hash is derived from the immutable record
	 * fields only; the mutable logging timestamp is deliberately excluded so
	 * the hash stays stable — equal records still share all these fields, so
	 * the contract holds.
	 */
	@Override
	public int hashCode() {
		int code = 0;
		code += (int) this.getTimestamp();
		code += (int) this.getTraceId();
		code += this.getOrderIndex();
		code += this.getLockId();
		return code;
	}
}
| {'content_hash': '790963159c2368563280f12d86825f26', 'timestamp': '', 'source': 'github', 'line_count': 185, 'max_line_length': 141, 'avg_line_length': 25.783783783783782, 'alnum_prop': 0.6880503144654088, 'repo_name': 'leadwire-apm/leadwire-javaagent', 'id': '26d5a521064203c5c33359915e6e3040103683ca', 'size': '5546', 'binary': False, 'copies': '1', 'ref': 'refs/heads/stable', 'path': 'leadwire-common/src-gen/kieker/common/record/flow/trace/concurrency/monitor/MonitorWaitEvent.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '11219'}, {'name': 'Java', 'bytes': '2473607'}, {'name': 'Python', 'bytes': '14400'}]} |
<div class="layer">
	<!-- Read-only user detail panel (Beetl template).
	     NOTE(review): the 电话 (phone) / 手机 (mobile) rows previously showed
	     user.mobile and user.phone respectively — i.e. swapped. They now show
	     the matching field; confirm against the edit form's field mapping. -->
	<div class="profile-user-info profile-user-info-striped" >
		<div class="profile-info-row">
			<div class="profile-info-name">姓名</div>
			<div class="profile-info-value">
				<span class="editable editable-click">${user.name!}</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">归属机构</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
				@var useroffice = office.getOfficeStrByOfficeId(user.officeId);
				@if(!isEmpty(useroffice)){
					(${useroffice})
				@}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">角色</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
				@for(role in roles){
					@if(isEmpty(role)) continue;
					${role.name!}
					@var sostr = office.getOfficeStrByOfficeId(role.officeId);
					@if(!isEmpty(sostr)){
						(${sostr})
					@}
					<!-- separator emitted for every non-last role, not only
					     for roles that have an office string -->
					${decode(roleLP.last,true,"",",")}
				@}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">登录名</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.username!}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">电话</div>
			<div class="profile-info-value">
				<span class="editable editable-click">${user.phone!}</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">手机</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.mobile!}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">邮箱</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.email!}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">工号</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.no!}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">最后登录IP</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.loginIp!}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">最后登录时间</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.loginDate!,dateFormat="yyyy-MM-dd HH:mm:ss"}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">创建时间</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.createDate!,dateFormat="yyyy-MM-dd HH:mm:ss"}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">更新者</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.updateBy!}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">更新时间</div>
			<div class="profile-info-value">
				<span class="editable editable-click">
					${user.updateDate!,dateFormat="yyyy-MM-dd HH:mm:ss"}
				</span>
			</div>
		</div>
		<div class="profile-info-row">
			<div class="profile-info-name">备注</div>
			<div class="profile-info-value">
				<span class="editable editable-click">${user.remarks!}</span>
			</div>
		</div>
	</div>
</div>
module TextileFilter
  # Registers the :textile filter on Post when this module is mixed in.
  def self.included(base)
    base.class_eval { Post::Filters << :textile }
  end

  # Renders the post body through RedCloth when the post uses the
  # :textile filter; otherwise returns the unfiltered content unchanged.
  def filtered_content
    content = super
    content = RedCloth.new(content).to_html.html_safe if filter.to_sym == :textile
    @filtered_content = content
  end
end
| {'content_hash': '1f77cf777787dc72879e1c796ad6b6e8', 'timestamp': '', 'source': 'github', 'line_count': 13, 'max_line_length': 104, 'avg_line_length': 20.846153846153847, 'alnum_prop': 0.6383763837638377, 'repo_name': 'shanesveller/thredded', 'id': '6f60e7f2b2be26f552ef20ceb1fd497a77fafb30', 'size': '271', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'lib/textile_filter.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '27822'}, {'name': 'JavaScript', 'bytes': '13729'}, {'name': 'Ruby', 'bytes': '122098'}]} |
package de.fischer.thotti.core.configuration.hdtest.c;
import com.amazonaws.services.elasticmapreduce.model.ActionOnFailure;
import de.fischer.thotti.core.annotations.DistributedTestJob;
/**
 * Marker class for a distributed test job; it carries no behavior of its own.
 * The {@link DistributedTestJob} annotation declares the job id ("idValue1")
 * and that a failing EMR step cancels remaining steps but keeps the cluster
 * alive ({@code CANCEL_AND_WAIT}).
 */
@DistributedTestJob(
        id = "idValue1",
        onFailure = ActionOnFailure.CANCEL_AND_WAIT)
public class HadoopMahoutJob1 {
}
| {'content_hash': 'f14385fd9c481f96a75fe280361db382', 'timestamp': '', 'source': 'github', 'line_count': 11, 'max_line_length': 69, 'avg_line_length': 29.363636363636363, 'alnum_prop': 0.7863777089783281, 'repo_name': 'sslavic/Thotti', 'id': 'a72e58000333c6ea7228acf45e3b5ccf0c5af7b2', 'size': '323', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'thotticore/src/test/java/de/fischer/thotti/core/configuration/hdtest/c/HadoopMahoutJob1.java', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
require 'rubygems'
require 'test-unit'
require 'hostsan'
| {'content_hash': 'ba76357bf34b88e424a8cea855663690', 'timestamp': '', 'source': 'github', 'line_count': 4, 'max_line_length': 19, 'avg_line_length': 14.5, 'alnum_prop': 0.7586206896551724, 'repo_name': 'kumakiyo/hostsan', 'id': '3cb21b490a63e7fca65cb15fdaa17b637a307227', 'size': '58', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'test/test_helper.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Ruby', 'bytes': '1339'}]} |
import pytest
from tests.helm_template_generator import render_chart
class TestServiceAccountAnnotations:
    """Verify that per-component ``serviceAccount.annotations`` values are
    rendered onto the corresponding ServiceAccount object."""

    # One row per component: (values key, annotation marker, template path,
    # extra values merged into the component block).  ``cleanup`` and
    # ``pgbouncer`` are disabled by default, so they need ``"enabled": True``
    # for their ServiceAccount template to render at all.
    SERVICE_ACCOUNT_CASES = [
        ("cleanup", "cleanup", "templates/cleanup/cleanup-serviceaccount.yaml", {"enabled": True}),
        ("scheduler", "scheduler", "templates/scheduler/scheduler-serviceaccount.yaml", {}),
        ("webserver", "webserver", "templates/webserver/webserver-serviceaccount.yaml", {}),
        ("workers", "worker", "templates/workers/worker-serviceaccount.yaml", {}),
        ("flower", "flower", "templates/flower/flower-serviceaccount.yaml", {}),
        ("statsd", "statsd", "templates/statsd/statsd-serviceaccount.yaml", {}),
        ("redis", "redis", "templates/redis/redis-serviceaccount.yaml", {}),
        ("pgbouncer", "pgbouncer", "templates/pgbouncer/pgbouncer-serviceaccount.yaml", {"enabled": True}),
        ("createUserJob", "createuser", "templates/jobs/create-user-job-serviceaccount.yaml", {}),
        ("migrateDatabaseJob", "migratedb", "templates/jobs/migrate-database-job-serviceaccount.yaml", {}),
        ("triggerer", "triggerer", "templates/triggerer/triggerer-serviceaccount.yaml", {}),
    ]

    @pytest.mark.parametrize(
        "values,show_only,expected_annotations",
        [
            (
                {component: {**extra, "serviceAccount": {"annotations": {"example": marker}}}},
                template,
                {"example": marker},
            )
            for component, marker, template, extra in SERVICE_ACCOUNT_CASES
        ],
    )
    def test_annotations_are_added(self, values, show_only, expected_annotations):
        """Render one ServiceAccount template and assert that every expected
        annotation key/value pair is present on its metadata."""
        k8s_objects = render_chart(
            values=values,
            show_only=[show_only],
        )
        # This test relies on the convention that the helm chart puts a single
        # ServiceAccount in its own .yaml file, so by specifying `show_only`,
        # we should only get a single k8s_object here - the target object that
        # we hope to test on.
        assert len(k8s_objects) == 1
        obj = k8s_objects[0]
        for k, v in expected_annotations.items():
            assert k in obj["metadata"]["annotations"]
            assert v == obj["metadata"]["annotations"][k]
# One row per component that supports per-component ``podAnnotations``:
# (values key, annotation marker, deployment template path).
POD_ANNOTATION_CASES = [
    ("scheduler", "scheduler", "templates/scheduler/scheduler-deployment.yaml"),
    ("webserver", "webserver", "templates/webserver/webserver-deployment.yaml"),
    ("workers", "worker", "templates/workers/worker-deployment.yaml"),
    ("flower", "flower", "templates/flower/flower-deployment.yaml"),
    ("triggerer", "triggerer", "templates/triggerer/triggerer-deployment.yaml"),
]


@pytest.mark.parametrize(
    "values,show_only,expected_annotations",
    [
        (
            {component: {"podAnnotations": {"example": marker}}},
            template,
            {"example": marker},
        )
        for component, marker, template in POD_ANNOTATION_CASES
    ],
)
class TestPerComponentPodAnnotations:
    """Verify that per-component ``podAnnotations`` reach the pod template of
    the component's Deployment, and that they take precedence over the global
    ``airflowPodAnnotations`` on key collisions."""

    def _render_single_object(self, values, show_only):
        """Render one template and return the single object it produces."""
        k8s_objects = render_chart(
            values=values,
            show_only=[show_only],
        )
        # This test relies on the convention that the helm chart puts a single
        # Deployment in its own .yaml file, so by specifying `show_only`,
        # we should only get a single k8s_object here - the target object that
        # we hope to test on.
        assert len(k8s_objects) == 1
        return k8s_objects[0]

    def _assert_pod_annotations(self, obj, expected_annotations):
        """Assert each expected key/value pair is on the pod template metadata."""
        annotations = obj["spec"]["template"]["metadata"]["annotations"]
        for k, v in expected_annotations.items():
            assert k in annotations
            assert v == annotations[k]

    def test_annotations_are_added(self, values, show_only, expected_annotations):
        """Component-level podAnnotations are rendered onto the pod spec."""
        obj = self._render_single_object(values, show_only)
        self._assert_pod_annotations(obj, expected_annotations)

    def test_precedence(self, values, show_only, expected_annotations):
        """Component podAnnotations override global airflowPodAnnotations."""
        values_global_annotations = {"airflowPodAnnotations": {k: "GLOBAL" for k in expected_annotations}}
        values_merged = {**values, **values_global_annotations}
        obj = self._render_single_object(values_merged, show_only)
        self._assert_pod_annotations(obj, expected_annotations)
| {'content_hash': '20e7e702c36c191772ade0d30c5b4bff', 'timestamp': '', 'source': 'github', 'line_count': 304, 'max_line_length': 106, 'avg_line_length': 31.585526315789473, 'alnum_prop': 0.34076234117892107, 'repo_name': 'apache/incubator-airflow', 'id': '28d462a215b97831ade348058d961b7788486232', 'size': '10388', 'binary': False, 'copies': '2', 'ref': 'refs/heads/main', 'path': 'chart/tests/test_annotations.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '69070'}, {'name': 'Dockerfile', 'bytes': '2001'}, {'name': 'HTML', 'bytes': '283783'}, {'name': 'JavaScript', 'bytes': '1387552'}, {'name': 'Mako', 'bytes': '1284'}, {'name': 'Python', 'bytes': '5482822'}, {'name': 'Shell', 'bytes': '40957'}]} |
{-# LANGUAGE Arrows #-}
module Main (main) where
--------------------------------------------------------------------------------
import Control.Monad.State.Strict
import Control.Monad.State.Class
import Control.Monad hiding (unless)
import Control.Monad.Random
import Control.Wire as W hiding ((.), merge, id)
import Control.Wire.Unsafe.Event hiding (merge)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Function (on)
import Data.List (groupBy, nubBy)
import Data.Tuple (swap)
import Data.Char (isAsciiLower, isSpace)
import Data.Array
import FRP.Netwire.Input
import qualified Graphics.UI.GLFW as GLFW
import qualified Lambency as L
import Linear hiding (trace)
import Debug.Trace
import System.IO.Unsafe -- !KLUDGE! ugh
import Grid
--------------------------------------------------------------------------------
-- | The word dictionary, loaded from dict.txt at startup.
type Dict = Set String
-- Window dimensions in pixels.
screenSizeX :: Int
screenSizeX = 800
screenSizeY :: Int
screenSizeY = 600
-- Board dimensions in letter cells.
gameDimX :: Int
gameDimX = 40
gameDimY :: Int
gameDimY = 30
-- Pixel size of one letter cell; the window must divide evenly into cells
-- or we fail loudly at startup.
letterSzX :: Int
letterSzX
  | screenSizeX `mod` gameDimX == 0 = screenSizeX `div` gameDimX
  | otherwise = error "Screen size is not multiple of game dimension along x axis"
letterSzY :: Int
letterSzY
  | screenSizeY `mod` gameDimY == 0 = screenSizeY `div` gameDimY
  | otherwise = error "Screen size is not multiple of game dimension along y axis"
-- Infinite cycle of digit glyphs used for the score (top) and timer
-- (bottom) rows of the board.
numberString :: [Char]
numberString = concat . repeat $ "9876543210"
-- | Interleave two lists, alternating elements starting with the first
-- list; when either list runs out, the remainder of the other follows.
merge :: [a] -> [a] -> [a]
merge as bs =
  case as of
    []        -> bs
    (a : as') -> a : merge bs as'
-- | An infinite stream of random lowercase ASCII letters; callers take one
-- board row's worth.
mkGameRow :: (Functor m, MonadRandom m) => m String
mkGameRow = filter isAsciiLower <$> getRandomRs ('a', 'z')
-- | A random row with @str@ interleaved into it starting at column @pos@,
-- so the hidden word is readable by taking every other letter.
mkGameRowWithString :: (Functor m, MonadRandom m) => String -> Int -> m String
mkGameRowWithString str pos = do
  (first, rest) <- splitAt pos <$> mkGameRow
  return $ first ++ merge str rest
-- | Menu row: "start<random letter>game" hidden at offset 10.
startGameString :: (Functor m, MonadRandom m) => m String
startGameString = do
  rc <- head <$> mkGameRow
  mkGameRowWithString (concat ["start", [rc], "game"]) 10
-- | Menu row: "quit" hidden at offset 10.
quitGameString :: (Functor m, MonadRandom m) => m String
quitGameString = mkGameRowWithString "quit" 10
-- | Game-over row: "over" hidden at offset 22.
gameOverString :: (Functor m, MonadRandom m) => m String
gameOverString = mkGameRowWithString "over" 22
-- | Build the full board: digit rows at top and bottom, the hidden menu
-- rows (start/game-over/quit) in the middle, random letters elsewhere.
-- Returned as a (col, row)-indexed array of size gameDimX x gameDimY.
gameBoardString :: (Functor m, MonadRandom m) => m (Array (Int, Int) Char)
gameBoardString = do
  firstRows <- replicateM ((gameDimY `div` 2) - 1) mkGameRow
  startGame <- startGameString
  gameOver <- gameOverString
  quit <- quitGameString
  lastRows <- replicateM ((gameDimY `div` 2) - 4) mkGameRow
  let rows = concat [[numberString],
                     firstRows,
                     [startGame, gameOver, quit],
                     lastRows,
                     [numberString]]
      -- Pair every character with 1-based (col, row) coordinates, clipped
      -- to the board dimensions.
      idxdList = zip [1..] $ take gameDimY $ map (zip [1..] . take gameDimX) rows
      setRow row (col, x) = ((col, row), x)
      arrList = concat $ (\(row, list) -> setRow row <$> list) <$> idxdList
  return $ array ((1, 1), (gameDimX, gameDimY)) arrList
-- | Fixed 2D camera covering the whole window.
camera :: L.GameWire a L.Camera
camera = pure zero >>> (L.mk2DCam screenSizeX screenSizeY)
-- | Render one character at its board cell.  The y axis is flipped (UI
-- origin at top) and a small pixel offset centers the glyph in the cell.
drawChar :: L.Font -> ((Int, Int), Char) -> L.GameMonad ()
drawChar fnt ((x, y), c) =
  L.renderUIString fnt [c] $ fromIntegral <$> (letterPos ^+^ offset)
  where
    letterPos = V2 ((x - 1) * letterSzX) (screenSizeY - y * letterSzY)
    offset = V2 4 2
-- | Whether a letter's color is currently animating or settled.
data LetterState
  = LetterChanging
  | LetterStatic
  deriving (Ord, Eq, Show, Enum, Bounded)
-- | Commands sent to a letter wire: snap to a color, or fade to it over
-- the given number of seconds.
data LetterInput
  = ChangeImmediately !(V3 Float)
  | ChangeGradually !Float !(V3 Float)
  deriving (Ord, Eq, Show)
-- | Per-frame output of a letter wire: its glyph, current color, whether
-- it was clicked this frame, and its animation state.
data LetterOutput = LetterOutput {
  outputChar :: !Char,
  outputColor :: !(V3 Float),
  outputClick :: !Bool,
  outputState :: !LetterState
} deriving (Ord, Eq, Show)
type LetterWire = L.GameWire (Event LetterInput) LetterOutput
-- | Pair every board character with its initial (white, static) output and
-- a fresh letter wire.
mkWiresFromChars :: Array (Int, Int) Char -> Array (Int, Int) (LetterOutput, LetterWire)
mkWiresFromChars arr = array (bounds arr) $ (\(ix, c) ->
  let output = LetterOutput c (V3 1 1 1) False LetterStatic
      wire = letterWire (V3 1 1 1) (ix, c)
  in (ix, (output, wire))) <$> (assocs arr)
-- | Restart the wire produced by @f@ every time a new key event arrives,
-- starting from @f initial@.
resetModes :: (Functor m, Monad m, Monoid s) => k -> (k -> Wire s e m a b) -> Wire s e m (a, Event k) b
resetModes initial f = second (arr (fmap f)) >>> rSwitch (f initial)
-- | The wire driving one board letter: reacts to LetterInput events by
-- snapping or lerping its color, feeding the current color back on itself
-- so a gradual change can start from wherever the last one left off.
letterWire :: V3 Float -> ((Int, Int), Char) -> LetterWire
letterWire initialColor (pos, c) = proc x -> do
  out <- handleColor initialColor -< x
  returnA -< out
  where
    -- Feedback core: input event paired with last frame's color.
    colorFeedback :: V3 Float -> L.GameWire (Event LetterInput, V3 Float) (LetterOutput, V3 Float)
    colorFeedback init =
      let -- Linear color interpolation over @duration@ seconds.
          lerpWire :: Float -> V3 Float -> V3 Float -> L.GameWire a LetterOutput
          lerpWire duration start end = proc x -> do
            t <- timeF / pure duration -< x
            color <- (arr $ \t' -> lerp t' end start) -< t
            returnA -< (LetterOutput c color False LetterChanging)
          pureColor color = proc x -> do
            returnA -< (LetterOutput c color False LetterStatic)
          -- Choose the sub-wire for each incoming command; a gradual change
          -- grabs the current color via mkSFN before starting the lerp.
          modeSelect :: LetterInput -> L.GameWire (V3 Float) LetterOutput
          modeSelect (ChangeImmediately newColor) = pureColor newColor
          modeSelect (ChangeGradually t newColor) =
            mkSFN $ \oldColor -> (LetterOutput c oldColor False LetterChanging,
                                  (lerpWire t oldColor newColor >>> for t) --> pureColor newColor)
      in (arr swap) >>> (resetModes (ChangeImmediately init) modeSelect) >>> (mkId &&& (arr outputColor))
    -- NOTE: the parameter here shadows the letter's glyph binding @c@ from
    -- the outer pattern; it is the initial color, not the character.
    handleColor :: V3 Float -> L.GameWire (Event LetterInput) LetterOutput
    handleColor c = loop $ second (delay c) >>> colorFeedback c
-- Stage wires map the whole board's letter outputs to per-letter commands.
type StageInput = Array (Int, Int) LetterOutput
type StageOutput = Array (Int, Int) (Maybe LetterInput)
type StageWire = L.GameWire StageInput StageOutput
-- | Step only the letter wires that either received a command this frame
-- or are mid-animation; everything else keeps its previous output.
handleLetters :: L.TimeStep -> StageOutput -> Array (Int, Int) (LetterOutput, LetterWire) ->
                 L.GameMonad (StageInput, Array (Int, Int) (LetterOutput, LetterWire))
handleLetters ts letterIpts letterWires = do
  arr <- mapM runWire $ do
    ix <- indices letterIpts
    let li = letterIpts ! ix
        (lst, lw) = letterWires ! ix
    -- Skip letters with no input that are not animating.
    guard (li /= Nothing || outputState lst == LetterChanging)
    return (ix, (li, lw))
  return $ ((fst <$> letterWires) // (map (fst <$>) arr),
            (letterWires // arr))
  where
    -- Step one wire, feeding NoEvent or the wrapped command.
    runWire :: (i, (Maybe LetterInput, LetterWire)) -> L.GameMonad (i, (LetterOutput, LetterWire))
    runWire (x, (Nothing, wire)) = do
      (Right output, newWire) <- stepWire wire ts (Right NoEvent)
      return (x, (output, newWire))
    runWire (x, (Just ipt, wire)) = do
      (Right output, newWire) <- stepWire wire ts (Right $ Event ipt)
      return (x, (output, newWire))
-- | Draw every letter, grouping consecutive letters of the same color so
-- the font color is only switched once per run.
renderLetters :: L.Font -> Array (Int, Int) LetterOutput -> L.GameMonad ()
renderLetters font arr =
  let charGrps = groupBy (\(_, LetterOutput _ x _ _) (_, LetterOutput _ y _ _) -> x == y) $ assocs arr
  in mapM_ (\(chars@((_, LetterOutput _ color _ _) : _)) ->
              let newFont = L.setFontColor color font
              in mapM_ (\(ix, LetterOutput c _ _ _) -> do
                          drawChar newFont (ix, c)) chars) charGrps
-- | If the mouse button is down, translate the cursor (normalized device
-- coords in [-1,1]) to a board cell and mark that letter as clicked.
checkClicked :: StageInput -> L.GameMonad StageInput
checkClicked ipt = do
  pressed <- mbIsPressed GLFW.MouseButton'1
  case pressed of
    False -> return ipt
    True -> do
      (mx, my) <- cursor
      -- releaseButton GLFW.MouseButton'1
      let sx = (mx * 0.5 + 0.5) * (fromIntegral screenSizeX)
          sy = (my * 0.5 + 0.5) * (fromIntegral screenSizeY)
          x = ceiling $ sx / (fromIntegral letterSzX)
          y = ceiling $ sy / (fromIntegral letterSzY)
          LetterOutput c color _ st = ipt ! (x, y)
      return $ ipt // [((x, y), LetterOutput c color True st)]
-- | Wire up the whole game: generate the board, create per-letter wires,
-- load the font, and return the top-level loop that each frame feeds
-- clicks into the stage wire, steps the letters, and renders them.
mkGame :: StageWire -> IO (L.GameWire () ())
mkGame w = do
  board <- gameBoardString
  let initialLetters = mkWiresFromChars board
      dummyStageOutput = (\c -> LetterOutput c (V3 1 1 1) False LetterStatic) <$> board
  font <- L.loadTTFont 18 (V3 1 1 1) "kenpixel.ttf"
  return $ runStage font initialLetters dummyStageOutput w
  where
    -- One frame: stage wire consumes last frame's letter outputs (with
    -- click info) and produces the commands for this frame's letters.
    runStage :: L.Font -> Array (Int, Int) (LetterOutput, LetterWire) -> StageInput -> StageWire -> L.GameWire () ()
    runStage font letters ipt w = mkGen $ \ts _ -> do
      (result, nextStage) <- checkClicked ipt >>= (stepWire w ts . Right)
      case result of
        Left x -> return (Left x, mkEmpty)
        Right letterInput -> do
          (nextIpt, nextLetters) <- handleLetters ts letterInput letters
          renderLetters font nextIpt
          return $ (Right (), runStage font nextLetters nextIpt nextStage)
-- | Build a full-board array by sampling @f col row@ at every cell.
mkStaticStageInput :: (Int -> Int -> a) -> Array (Int, Int) a
mkStaticStageInput f =
  let cells = do
        col <- [1..gameDimX]
        row <- [1..gameDimY]
        return ((col, row), f col row)
  in array ((1, 1), (gameDimX, gameDimY)) cells
-- | Board cells spelling "LITERATE" on the title screen (shifted right by
-- one column).
titlePositions :: [(Int, Int)]
titlePositions = (\(x, y) -> (x + 1, y)) <$> [
  {- L -} (4, 4), (4, 5), (4, 6), (4, 7), (4, 8), (5, 8), (6, 8),
  {- I -} (8, 4), (9, 4), (10, 4), (9, 5), (9, 6), (9, 7), (9, 8), (8, 8), (10, 8),
  {- T -} (12, 4), (13, 4), (14, 4), (13, 5), (13, 6), (13, 7), (13, 8),
  {- E -} (16, 4), (17, 4), (18, 4), (16, 5), (16, 6), (17, 6), (16, 7), (16, 8), (17, 8), (18, 8),
  {- R -} (20, 4), (21, 4), (22, 4), (20, 5), (22, 5), (20, 6), (21, 6), (20, 7), (22, 7), (20, 8), (22, 8),
  {- A -} (24, 4), (25, 4), (26, 4), (24, 5), (26, 5), (24, 6), (25, 6), (26, 6), (24, 7), (26, 7), (24, 8), (26, 8),
  {- T -} (28, 4), (29, 4), (30, 4), (29, 5), (29, 6), (29, 7), (29, 8),
  {- E -} (32, 4), (33, 4), (34, 4), (32, 5), (32, 6), (33, 6), (32, 7), (32, 8), (33, 8), (34, 8)
  ]
-- Cells of the letters hidden inside the menu rows (every other column,
-- matching how mkGameRowWithString interleaves the words).
startGamePositions :: [(Int, Int)]
startGamePositions = [(11 + x, 16) | x <- [0,2,4,6,8,12,14,16,18]]
gameOverPositions :: [(Int, Int)]
gameOverPositions = [(23 + x, 16) | x <- [0,2,4,6]] ++ [(23 + x, 17) | x <- [0, 2, 4, 6]]
quitGamePositions :: [(Int, Int)]
quitGamePositions = [(11 + x, 18) | x <- [0,2,4,6]]
-- | A stage output that sends no commands to any letter.
idleStage :: StageOutput
idleStage = mkStaticStageInput (\x y -> Nothing)
-- | Prefix a stage with one frame that dims the whole board to dark gray.
prepareStage :: StageWire -> StageWire
prepareStage = (delay mkBoard <<<)
  where
    mkBoard = mkStaticStageInput inputBoard
    inputBoard _ _ = Just $ ChangeImmediately (V3 0.1 0.1 0.1)
-- | Title fade-in: border and menu letters fade to white, the "LITERATE"
-- logo fades to cyan; runs for timeToFade seconds then inhibits.
introSequence :: StageWire
introSequence = prepareStage (pure idleStage >>> delay fadeToMenu >>> for timeToFade)
  where
    timeToFade = 2.0
    fadeToMenu = mkStaticStageInput introInput
    fadeToWhite = Just $ ChangeGradually timeToFade (V3 1 1 1)
    introInput :: Int -> Int -> Maybe LetterInput
    introInput 1 _ = fadeToWhite
    introInput _ 1 = fadeToWhite
    introInput x y
      | x == gameDimX = fadeToWhite
      | y == gameDimY = fadeToWhite
      | (x, y) `elem` titlePositions = Just $ ChangeGradually timeToFade (V3 0 0.9 0.9)
      | (x, y) `elem` kMenuPositions = fadeToWhite
      | otherwise = Nothing
-- | Enter gameplay: black out everything except the digit rows, then hand
-- over to the game wire.
startGame :: Dict -> StageWire
startGame dict = prepareStage $ (pure idleStage >>> delay fadeToGame >>> mkEmpty) --> (gameWire dict)
  where
    fadeToGame = mkStaticStageInput introInput
    introInput :: Int -> Int -> Maybe LetterInput
    introInput _ 1 = Nothing
    introInput x y
      | y == gameDimY = Nothing
      | otherwise = Just $ ChangeImmediately (V3 0.1 0.1 0.1)
-- | Game-over screen: fade "over" to dark red, wait for Space (or the
-- timeout), then replay the intro and return to the menu.
gameOver :: Dict -> StageWire
gameOver dict = (pure idleStage >>> delay fadeToGameOver >>>
                 ((mkId >>> for timeToFade) --> L.quitWire GLFW.Key'Space)) -->
                introSequence -->
                (gameMenu dict)
  where
    timeToFade = 2.0
    fadeToGameOver = mkStaticStageInput gameOverInput
    gameOverInput :: Int -> Int -> Maybe LetterInput
    gameOverInput _ 1 = Nothing
    gameOverInput x y
      | y == gameDimY = Nothing
      | (x, y) `elem` gameOverPositions = Just $ ChangeGradually timeToFade (V3 0.4 0.1 0.1)
      | otherwise = Just $ ChangeImmediately (V3 0.1 0.1 0.1)
-- | Map a number (effectively 0-9999) to the board cells of its four
-- decimal digits on the given row.  Each digit strip on the mirrored
-- "9876543210" scale rows is ten cells wide, hence the 10/20/30 offsets.
numberToPositions :: Int -> Int -> [(Int, Int)]
numberToPositions n row =
  let ones = n `mod` 10
      tens = (n `mod` 100) `div` 10
      hunds = (n `mod` 1000) `div` 100
      thous = (n `mod` 10000) `div` 1000
      -- Mirror horizontally against the board width.  This was previously a
      -- hard-coded 40, which silently depended on gameDimX == 40.
      mirror (x, y) = (gameDimX - x, y)
  in mirror <$> [(ones, row), (10 + tens, row), (20 + hunds, row), (30 + thous, row)]
-- Seconds for a digit-row color change (no signature: type is fixed to
-- Float by its use with ChangeGradually).
numberFade = 0.2
-- | Highlight the digit cells of @x@ on @row@ in @color@ and fade every
-- other cell of that row back to white.  NOTE(review): the lambda binders
-- below shadow the outer @x@ parameter.
setNumberRow :: Int -> Int -> V3 Float -> StageOutput -> StageOutput
setNumberRow x row color opt =
  let numberPos = numberToPositions x row
      allPos = [(x, row) | x <- [1..gameDimX]]
      f (x, y)
        | (x, y) `elem` numberPos = Just $ ChangeGradually numberFade color
        | otherwise = Just $ ChangeGradually numberFade (V3 1 1 1)
  in opt // ((\x -> (x, f x)) <$> allPos)
-- | Score shown in red on the top digit row.
setScore :: Int -> StageOutput -> StageOutput
setScore x = setNumberRow x 1 (V3 0.9 0.1 0.1)
-- | Remaining time shown in green on the bottom digit row.
setTime :: Int -> StageOutput -> StageOutput
setTime x = setNumberRow x gameDimY (V3 0.2 0.6 0.2)
-- | Apply @f@ to the stage output only when the Int input changes,
-- remembering the previous value via a one-frame delayed feedback loop.
-- The first frame always applies @f@.
intWire :: (Int -> StageOutput -> StageOutput) -> L.GameWire (StageOutput, Int) StageOutput
intWire f = mkSFN $ \(output, x) -> (f x output, loop $ second (delay x) >>> intFeedback)
  where
    intFeedback :: L.GameWire ((StageOutput, Int), Int) (StageOutput, Int)
    intFeedback = mkSF_ $ \((output, x), x') -> (if x == x' then output else f x output, x)
-- | A clicked letter together with the board cell it came from.
data BoardChar = BoardChar Char (Int, Int)
-- | Drop the positions, keeping just the spelled word.
boardString :: [BoardChar] -> String
boardString = map (\(BoardChar c _) -> c)
-- | Mutable gameplay state threaded through the game wires.
data LState = LState {
  stateGen :: StdGen,                                            -- RNG for spawning letters
  timeTillLetter :: Float,                                       -- seconds until next letter pulse
  letterWires :: [((Int, Int), L.GameWire () (Maybe LetterInput))], -- active pulsing letters
  currentString :: [BoardChar],                                  -- clicked letters, most recent first
  currentScore :: Int
}
initialState :: LState
initialState = LState {
  stateGen = unsafePerformIO getStdGen, -- !KLUDGE! ick
  timeTillLetter = 0.0,
  letterWires = [],
  currentString = [],
  currentScore = 0
}
-- | One letter "pulse": fade to cyan, hold for two fade periods, fade back
-- to dark; the whole wire inhibits after 4*fadeTime seconds.
pulseLetter :: Float -> L.GameWire () (Maybe LetterInput)
pulseLetter fadeTime =
  (pure Nothing >>> delay (Just $ ChangeGradually fadeTime (V3 0.2 0.9 0.9)) >>> for fadeTime) -->
  (pure Nothing >>> for (2.0 * fadeTime)) -->
  (pure Nothing >>> delay (Just $ ChangeGradually fadeTime (V3 0.1 0.1 0.1)) >>> for fadeTime)
type LetterGameWire = ((Int, Int), L.GameWire () (Maybe LetterInput))
-- | Pick a random cell (rows 2..gameDimY-1, avoiding the digit rows) and a
-- random fade time, advancing the RNG in the game state.
genNewLetterGameWire :: StateT LState L.GameMonad LetterGameWire
genNewLetterGameWire = do
  lstate <- get
  let (x, g') = randomR (1, gameDimX) (stateGen lstate)
      (y, g'') = randomR (2, gameDimY - 1) g'
      (fadeTime, g''') = randomR (0.5, 1.0) g''
  put $ lstate { stateGen = g''' }
  return ((x, y), pulseLetter fadeTime)
-- | Spawn a new pulsing letter at a cell not already occupied, retrying on
-- collision.
mkNewLetterGameWire :: StateT LState L.GameMonad ()
mkNewLetterGameWire = do
  lstate <- get
  w@(pos, _) <- genNewLetterGameWire
  case (any (\(x, _) -> x == pos) (letterWires lstate)) of
    True -> mkNewLetterGameWire
    False -> put $ lstate { letterWires = w : (letterWires lstate) }
-- | Step one active letter.  If it was clicked this frame, record it in
-- currentString, flash it yellow, and replace its wire with a no-op;
-- otherwise step its pulse wire, dropping it once the pulse inhibits.
runLetter :: L.TimeStep -> StageInput -> LetterGameWire -> StateT LState L.GameMonad [(Maybe LetterInput, LetterGameWire)]
runLetter ts ipt (pos, w) = do
  let LetterOutput c _ click _ = ipt ! pos
  case click of
    False -> do
      (result, w') <- lift $ stepWire w ts (Right ())
      case result of
        Left _ -> return []
        Right x -> return [(x, (pos, w'))]
    True -> do
      lstate <- get
      put $ lstate { currentString = (BoardChar c pos) : (currentString lstate) }
      return [(Just $ ChangeImmediately (V3 0.9 0.9 0.2), (pos, mkConst $ Right Nothing))]
-- | Read a key and consume the press so it only registers once.
debounceKey :: GLFW.Key -> L.GameMonad Bool
debounceKey key = do
  isPressed <- keyIsPressed key
  case isPressed of
    True -> releaseKey key
    False -> return ()
  return isPressed
-- | Step every active letter; on Space, submit the collected word
-- (deduplicated by cell, in click order), score it if it's in the
-- dictionary, and darken all letters back to the idle color.
runLetters :: Dict -> L.TimeStep -> StageInput ->
              StateT LState L.GameMonad [(Maybe LetterInput, LetterGameWire)]
runLetters dict ts ipt = do
  currentLetters <- letterWires <$> get
  letters <- concat <$> mapM (runLetter ts ipt) currentLetters
  space <- lift $ debounceKey GLFW.Key'Space
  lstate <- get
  case space of
    False -> return letters
    True -> do
      let str = boardString . nubBy ((==) `on` (\(BoardChar _ p) -> p)) . reverse . currentString $ lstate
      put $ lstate {
        currentString = [],
        letterWires = []}
      -- NOTE(review): Debug.Trace logging left in; harmless but noisy.
      trace ("Checking string: " ++ str) $ return ()
      case (Set.member str dict) of
        True -> modify $ \st -> st { currentScore = (currentScore st) + (length str) }
        False -> return ()
      return $ (\(_, w) -> ((Just $ ChangeImmediately (V3 0.1 0.1 0.1)), w)) <$> letters
-- | The main gameplay wire: runs the countdown timer and the stateful game
-- logic in parallel, painting score (top row) and time (bottom row); when
-- the timer hits zero the wire inhibits and control passes to gameOver.
gameWire :: Dict -> StageWire
gameWire dict =
  ((runTime &&& runGame initialState (runGameState >>> setScoreWire)) >>> setTimeWire) --> gameOver dict
  where
    startTime = 100.0
    -- Inhibits once the countdown reaches zero, ending the round.
    runTime :: L.GameWire a Float
    runTime = timer startTime >>> W.when (>0)
    timer :: Float -> L.GameWire a Float
    timer start = mkSF $ \ts _ ->
      let newTime = start - (dtime ts)
      in (newTime, timer newTime)
    setTimeWire :: L.GameWire (Float, StageOutput) StageOutput
    setTimeWire = arr (fmap round . swap) >>> (intWire setTime)
    -- Lifts intWire setScore into the StateT stack, reading the score from
    -- the game state each frame.
    setScoreWire :: Wire L.TimeStep String (StateT LState L.GameMonad) StageOutput StageOutput
    setScoreWire = runScoreWire (intWire setScore)
      where
        runScoreWire w = mkGen $ \ts output -> do
          score <- currentScore <$> get
          (Right out, nw) <- lift $ stepWire w ts (Right (output, score))
          return (Right out, runScoreWire nw)
    -- Spawns letters on a random cadence and folds their outputs into a
    -- single stage output.
    runGameState :: Wire L.TimeStep String (StateT LState L.GameMonad) StageInput StageOutput
    runGameState = mkGen $ \ts ipt -> do
      lstate <- get
      case (timeTillLetter lstate <= 0.0) of
        False -> put $ lstate { timeTillLetter = (timeTillLetter lstate) - (dtime ts) }
        True -> do
          -- NOTE(review): newg is unused, so stateGen is not advanced here;
          -- the cadence draw reuses the same generator each time (the
          -- generator does advance inside genNewLetterGameWire).
          let (x, newg) = randomR (0.05, 0.2) (stateGen lstate)
          put $ lstate { timeTillLetter = x }
          mkNewLetterGameWire
      letterFades <- runLetters dict ts ipt
      modify $ \st -> st { letterWires = snd <$> letterFades }
      let out = idleStage // ((\(ipt, (pos, _)) -> (pos, ipt)) <$> letterFades)
      return $ (Right out, runGameState)
    -- Runs a StateT wire inside the plain GameMonad by threading the state
    -- explicitly between frames.
    runGame :: LState
            -> Wire L.TimeStep String (StateT LState L.GameMonad) StageInput StageOutput
            -> L.GameWire StageInput StageOutput
    runGame state gsw = mkGen $ \ts ipt -> do
      ((result, nextW), nextState) <- runStateT (stepWire gsw ts (Right ipt)) state
      return (result, runGame nextState nextW)
-- Number of selectable menu entries (start game, quit).
kNumMenuOptions :: Int
kNumMenuOptions = 2
kMenuPositions :: [(Int, Int)]
kMenuPositions = concat [startGamePositions, quitGamePositions]
-- | Main menu: Up/Down cycles the selection, the selected entry flickers
-- yellow/white, Enter commits (via inhibition carrying the choice name).
gameMenu :: Dict -> StageWire
gameMenu dict = runWire (selection 0 >>> flickerSelection)
  where
    -- If enter is pressed then the wire inhibits forever with a value
    -- otherwise identity wire
    menuChoice :: L.GameWire Int Int
    menuChoice = mkGenN $ \x -> do
      enter <- keyIsPressed GLFW.Key'Enter
      case enter of
        False -> return (Right x, menuChoice)
        True ->
          case x of
            0 -> return (Left "Game", inhibit "Game")
            1 -> return (Left "Quit", inhibit "Quit")
            _ -> return (Left mempty, mkEmpty)
    selectionFeedback (f, x) = let y = f x in (y, y)
    -- Current selection index: apply the +1/-1 (mod count) updater chosen
    -- by the debounced arrow keys to the previous index via feedback.
    selection x =
      ((keyDebounced GLFW.Key'Up >>> pure ((`mod` kNumMenuOptions) . (+1))) <|>
       (keyDebounced GLFW.Key'Down >>> pure ((`mod` kNumMenuOptions) . (+(kNumMenuOptions-1)))) <|>
       pure id) >>>
      (loop $ second (delay x) >>> (arr selectionFeedback)) >>>
      menuChoice
    flickerTime = 0.4
    -- Fade the given cells toward a color, snap other menu cells to white.
    pulseColor :: V3 Float -> [(Int, Int)] -> StageOutput
    pulseColor c positions = mkStaticStageInput findPos
      where
        findPos x y
          | (x, y) `elem` positions = Just $ ChangeGradually flickerTime c
          | (x, y) `elem` kMenuPositions = Just $ ChangeImmediately (V3 1 1 1)
          | otherwise = Nothing
    pulseYellow = pulseColor (V3 1 1 0)
    pulseWhite = pulseColor (V3 1 1 1)
    -- Alternate yellow/white on whichever entry is selected, looping.
    flickerSelection =
      ((W.when (== 0)) >>>
       ((pure idleStage >>> delay (pulseYellow startGamePositions) >>> for flickerTime) -->
        (pure idleStage >>> delay (pulseWhite startGamePositions) >>> for flickerTime))) -->
      ((W.when (== 1)) >>>
       ((pure idleStage >>> delay (pulseYellow quitGamePositions) >>> for flickerTime) -->
        (pure idleStage >>> delay (pulseWhite quitGamePositions) >>> for flickerTime))) -->
      flickerSelection
    -- Interpret the inhibition value as the committed menu choice.
    runWire w = mkGen $ \ts ipt -> do
      (result, w') <- stepWire w ts (Right ipt)
      case result of
        Left "Game" -> return (Right idleStage, startGame dict)
        Left "Quit" -> return (Right idleStage, mkEmpty)
        Left x -> error $ "Unknown menu option: " ++ x
        Right x -> return (Right x, runWire w')
-- | A stage that issues a one-frame "fade everything to yellow" command,
-- then idles (apparently unused elsewhere in this file).
initialStage :: L.GameWire a StageOutput
initialStage =
  let startArray = array ((1, 1), (gameDimX, gameDimY))
                   [((x, y), Just $ ChangeGradually 1.0 (V3 1 1 0)) | x <- [1..gameDimX], y <- [1..gameDimY]]
  in
   (pure $ (\_ -> Nothing) <$> startArray) >>> delay startArray
-- | Strip leading and trailing whitespace by applying "reverse after
-- dropping leading spaces" twice.
trim :: String -> String
trim s = strip (strip s)
  where strip = reverse . dropWhile isSpace
-- | Load the dictionary from dict.txt (one word per line, trimmed) and
-- assemble the Lambency game starting at the intro sequence.
initGame :: IO (L.Game ())
initGame = do
  putStr "Creating dictionary..."
  dict <- foldr Set.insert Set.empty . map trim . lines <$> readFile "dict.txt"
  putStrLn "Done."
  g <- mkGame (introSequence --> gameMenu dict)
  return $ L.Game {
    L.staticLights = [],
    L.staticGeometry = [],
    L.mainCamera = camera,
    L.dynamicLights = [],
    L.gameLogic = g }
main :: IO ()
main = L.withWindow screenSizeX screenSizeY "Literate" $
       L.loadAndRun () initGame
| {'content_hash': '47910ccb75327bad912975c7f2f31998', 'timestamp': '', 'source': 'github', 'line_count': 561, 'max_line_length': 122, 'avg_line_length': 38.101604278074866, 'alnum_prop': 0.6093567251461989, 'repo_name': 'Mokosha/literate-ludum-dare-31', 'id': '03ef6549fa54c84079b3ad9bdb38cf16fe735d3d', 'size': '21375', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Main.hs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Haskell', 'bytes': '26787'}]} |
/*
This software is allowed to use under GPL or you need to obtain Commercial or Enterise License
to use it in non-GPL project. Please contact [email protected] for details
*/
(function() {
var dx, dy;
function clean_html(val) {
return val.replace(newline_regexp, "\n").replace(html_regexp, "");
}
function x_norm(x, offset) {
x = parseFloat(x);
offset = parseFloat(offset);
if (!isNaN(offset)) x -= offset;
var w = colsWidth(x);
x = x - w.width + w.cols*dx;
return isNaN(x)?"auto":(100*x/(dx));
}
function x_norm_event(x, offset, is_left) {
x = parseFloat(x);
offset = parseFloat(offset);
if (!isNaN(offset) && is_left) x -= offset;
var w = colsWidth(x);
x = x - w.width + w.cols*dx;
return isNaN(x)?"auto":(100*x/(dx-(!isNaN(offset)?offset:0)));
}
function colsWidth(width) {
var r = 0;
var header = scheduler._els.dhx_cal_header[0].childNodes;
var els = header[1] ? header[1].childNodes : header[0].childNodes;
for (var i = 0; i < els.length; i++) {
var el = els[i].style ? els[i] : els[i].parentNode;
var w = parseFloat(el.style.width);
if (width > w)
width -= (w+1),r+=(w+1);
else
break;
}
return { width: r, cols: i };
}
function y_norm(y) {
y = parseFloat(y);
if (isNaN(y)) return "auto";
return 100 * y / dy;
}
function get_style(node, style){
return (window.getComputedStyle?(window.getComputedStyle(node, null)[style]):(node.currentStyle?node.currentStyle[style]:null))||"";
}
function de_day(node, n) {
var x = parseInt(node.style.left, 10);
for (var dx = 0; dx < scheduler._cols.length; dx++) {
x -= scheduler._cols[dx];
if (x < 0) return dx;
}
return n;
}
function de_week(node, n) {
var y = parseInt(node.style.top, 10);
for (var dy = 0; dy < scheduler._colsS.heights.length; dy++)
if (scheduler._colsS.heights[dy] > y) return dy;
return n;
}
function xml_start(tag) {
return tag ? "<"+tag+">" : "";
}
function xml_end(tag) {
return tag ? "</"+tag+">" : "";
}
function xml_top(tag, profile, header, footer) {
var xml = "<"+tag+" profile='" + profile + "'";
if (header)
xml += " header='" + header + "'";
if (footer)
xml += " footer='" + footer + "'";
xml += ">";
return xml;
}
function xml_body_header() {
var xml = "";
// detects if current mode is timeline
var mode = scheduler._mode;
if (scheduler.matrix && scheduler.matrix[scheduler._mode])
mode = (scheduler.matrix[scheduler._mode].render == "cell") ? "matrix" : "timeline";
xml += "<scale mode='" + mode + "' today='" + scheduler._els.dhx_cal_date[0].innerHTML + "'>";
if (scheduler._mode == "week_agenda") {
var xh = scheduler._els.dhx_cal_data[0].getElementsByTagName("DIV");
for (var i = 0; i < xh.length; i++)
if (xh[i].className == "dhx_wa_scale_bar")
xml += "<column>" + clean_html(xh[i].innerHTML) + "</column>";
} else if (scheduler._mode == "agenda" || scheduler._mode == "map") {
var xh = scheduler._els.dhx_cal_header[0].childNodes[0].childNodes;
xml += "<column>" + clean_html(xh[0].innerHTML) + "</column><column>" + clean_html(xh[1].innerHTML) + "</column>";
} else if (scheduler._mode == "year") {
var xh = scheduler._els.dhx_cal_data[0].childNodes;
for (var i = 0; i < xh.length; i++) {
xml += "<month label='" + clean_html(xh[i].childNodes[0].innerHTML) + "'>";
xml += xml_month_scale(xh[i].childNodes[1].childNodes);
xml += xml_month(xh[i].childNodes[2]);
xml += "</month>";
}
} else {
xml += "<x>";
var xh = scheduler._els.dhx_cal_header[0].childNodes;
xml += xml_month_scale(xh);
xml += "</x>";
var yh = scheduler._els.dhx_cal_data[0];
if (scheduler.matrix && scheduler.matrix[scheduler._mode]) {
xml += "<y>";
for (var i = 0; i < yh.firstChild.rows.length; i++) {
var el = yh.firstChild.rows[i];
xml += "<row><![CDATA[" + clean_html(el.cells[0].innerHTML) + "]]></row>";
}
xml += "</y>";
dy = yh.firstChild.rows[0].cells[0].offsetHeight;
} else if (yh.firstChild.tagName == "TABLE") {
xml += xml_month(yh);
} else {
yh = yh.childNodes[yh.childNodes.length - 1];
while (yh.className.indexOf("dhx_scale_holder") == -1)
yh = yh.previousSibling;
yh = yh.childNodes;
xml += "<y>";
for (var i = 0; i < yh.length; i++)
xml += "\n<row><![CDATA[" + clean_html(yh[i].innerHTML) + "]]></row>";
xml += "</y>";
dy = yh[0].offsetHeight;
}
}
xml += "</scale>";
return xml;
}
function xml_month(yh) {
var xml = "";
var r = yh.firstChild.rows;
for (var i = 0; i < r.length; i++) {
var days = [];
for (var j = 0; j < r[i].cells.length; j++)
days.push(r[i].cells[j].firstChild.innerHTML);
xml += "\n<row height='" + yh.firstChild.rows[i].cells[0].offsetHeight + "'><![CDATA[" + clean_html(days.join("|")) + "]]></row>";
dy = yh.firstChild.rows[0].cells[0].offsetHeight;
}
return xml;
}
function xml_month_scale(xh) {
var xml = "";
if (scheduler.matrix && scheduler.matrix[scheduler._mode]) {
if (scheduler.matrix[scheduler._mode].second_scale)
var xhs = xh[1].childNodes;
xh = xh[0].childNodes;
}
for (var i = 0; i < xh.length; i++)
xml += "\n<column><![CDATA[" + clean_html(xh[i].innerHTML) + "]]></column>";
dx = xh[0].offsetWidth;
if (xhs) {
var width = 0;
var top_width = xh[0].offsetWidth;
var top_col = 1;
for (var i = 0; i < xhs.length; i++) {
xml += "\n<column second_scale='" + top_col + "'><![CDATA[" + clean_html(xhs[i].innerHTML) + "]]></column>";
width += xhs[i].offsetWidth;
if (width >= top_width) {
top_width += (xh[top_col] ? xh[top_col].offsetWidth : 0);
top_col++;
}
dx = xhs[0].offsetWidth;
}
}
return xml;
}
// Serializes the events of the currently displayed view into <event> XML
// nodes for the PDF export. When `colors` is truthy, the computed text and
// background colors of the rendered elements are embedded as attributes.
// Each view mode (agenda/map, week_agenda, year, cell-rendered timeline,
// and the default day/week/month/timeline case) produces its own layout.
function xml_body(colors) {
	var xml = "";
	var evs = scheduler._rendered;
	var matrix = scheduler.matrix && scheduler.matrix[scheduler._mode];
	if (scheduler._mode == "agenda" || scheduler._mode == "map") {
		// Agenda/map rows: childNodes[0] holds the date text, [2] the body.
		for (var i = 0; i < evs.length; i++)
			xml += "<event><head><![CDATA[" + clean_html(evs[i].childNodes[0].innerHTML) + "]]></head><body><![CDATA[" + clean_html(evs[i].childNodes[2].innerHTML) + "]]></body></event>";
	} else if (scheduler._mode == "week_agenda") {
		for (var i = 0; i < evs.length; i++)
			xml += "<event day='" + evs[i].parentNode.getAttribute("day") + "'><body>" + clean_html(evs[i].innerHTML) + "</body></event>";
	} else if (scheduler._mode == "year") {
		// Year view: walk each event day by day, clipped to the visible
		// [_min_date, _max_date) range, emitting one marker per day cell.
		var evs = scheduler.get_visible_events();
		for (var i = 0; i < evs.length; i++) {
			var d = evs[i].start_date;
			if (d.valueOf() < scheduler._min_date.valueOf())
				d = scheduler._min_date;
			while (d < evs[i].end_date) {
				// Month index relative to the first displayed month.
				var m = d.getMonth() + 12 * (d.getFullYear() - scheduler._min_date.getFullYear()) - scheduler.week_starts._month;
				// Linear day offset within the month grid; split into row/column below.
				var day = scheduler.week_starts[m] + d.getDate() - 1;
				var text_color = colors ? get_style(scheduler._get_year_cell(d), "color") : "";
				var bg_color = colors ? get_style(scheduler._get_year_cell(d), "backgroundColor") : "";
				xml += "<event day='" + (day % 7) + "' week='" + Math.floor(day / 7) + "' month='" + m + "' backgroundColor='" + bg_color + "' color='" + text_color + "'></event>";
				d = scheduler.date.add(d, 1, "day");
				if (d.valueOf() >= scheduler._max_date.valueOf())
					break;
			}
		}
	} else if (matrix && matrix.render == "cell") {
		// Cell-rendered timeline: export every data cell of the grid.
		var evs = scheduler._els.dhx_cal_data[0].getElementsByTagName("TD");
		for (var i = 0; i < evs.length; i++) {
			var text_color = colors ? get_style(evs[i], "color") : "";
			var bg_color = colors ? get_style(evs[i], "backgroundColor") : "";
			xml += "\n<event><body backgroundColor='" + bg_color + "' color='" + text_color + "'><![CDATA[" + clean_html(evs[i].innerHTML) + "]]></body></event>";
		}
	} else {
		// Default case: positioned event boxes (day/week/month/timeline bars).
		for (var i = 0; i < evs.length; i++) {
			var zx, zdx;
			if (scheduler.matrix && scheduler.matrix[scheduler._mode]) {
				// logic for timeline view
				zx = x_norm(evs[i].style.left);
				zdx = x_norm(evs[i].offsetWidth)-1;
			} else {
				// we should use specific logic for day/week/units view
				var left_norm = scheduler.config.use_select_menu_space ? 0 : 26;
				zx = x_norm_event(evs[i].style.left, left_norm, true);
				zdx = x_norm_event(evs[i].style.width, left_norm)-1;
			}
			if (isNaN(zdx * 1)) continue; // skip elements with no usable width
			var zy = y_norm(evs[i].style.top);
			var zdy = y_norm(evs[i].style.height);
			// Element kind is encoded in the first CSS class, e.g. "dhx_cal_event".
			var e_type = evs[i].className.split(" ")[0].replace("dhx_cal_", "");
			if (e_type === 'dhx_tooltip_line') continue;
			var dets = scheduler.getEvent(evs[i].getAttribute("event_id"));
			if (!dets) continue; // rendered box without a backing event — skip
			var day = dets._sday;
			var week = dets._sweek;
			var length = dets._length || 0;
			if (scheduler._mode == "month") {
				zdy = parseInt(evs[i].offsetHeight, 10);
				zy = parseInt(evs[i].style.top, 10) - 22; // 22 presumably the cell header height — TODO confirm
				day = de_day(evs[i], day);
				week = de_week(evs[i], week);
			} else if (scheduler.matrix && scheduler.matrix[scheduler._mode]) {
				// Timeline bars: the row index is the "week"; temporarily swap
				// the global row height `dy` so y_norm scales within this row.
				day = 0;
				var el = evs[i].parentNode.parentNode.parentNode;
				week = el.rowIndex;
				var dy_copy = dy;
				dy = evs[i].parentNode.offsetHeight;
				zy = y_norm(evs[i].style.top);
				zy -= zy * 0.2;
				dy = dy_copy;
			} else {
				if (evs[i].parentNode == scheduler._els.dhx_cal_data[0]) continue;
				// Offset by the scale holder position so x is view-absolute.
				var parent = scheduler._els["dhx_cal_data"][0].childNodes[0];
				var offset = parseFloat(parent.className.indexOf("dhx_scale_holder") != -1 ? parent.style.left : 0);
				zx += x_norm(evs[i].parentNode.style.left, offset);
			}
			xml += "\n<event week='" + week + "' day='" + day + "' type='" + e_type + "' x='" + zx + "' y='" + zy + "' width='" + zdx + "' height='" + zdy + "' len='" + length + "'>";
			if (e_type == "event") {
				// Full event boxes: childNodes[1] is the header, [2] the body.
				xml += "<header><![CDATA[" + clean_html(evs[i].childNodes[1].innerHTML) + "]]></header>";
				var text_color = colors ? get_style(evs[i].childNodes[2], "color") : "";
				var bg_color = colors ? get_style(evs[i].childNodes[2], "backgroundColor") : "";
				xml += "<body backgroundColor='" + bg_color + "' color='" + text_color + "'><![CDATA[" + clean_html(evs[i].childNodes[2].innerHTML) + "]]></body>";
			} else {
				var text_color = colors ? get_style(evs[i], "color") : "";
				var bg_color = colors ? get_style(evs[i], "backgroundColor") : "";
				xml += "<body backgroundColor='" + bg_color + "' color='" + text_color + "'><![CDATA[" + clean_html(evs[i].innerHTML) + "]]></body>";
			}
			xml += "</event>";
		}
	}
	return xml;
}
// Builds the export XML and POSTs it to the export service at `url` through
// a temporary hidden form (field "mycoolxmlbody", URL-encoded, new tab).
// When `start` is given, the scheduler is stepped from `start` to `end` one
// `view` unit at a time and one <page> is emitted per step (the original
// view is restored afterwards); otherwise only the current view is exported.
// `mode` selects the color scheme; "fullcolor" maps to "color" with element
// colors captured by xml_body.
function to_pdf(start, end, view, url, mode, header, footer) {
	var colors = false;
	if (mode == "fullcolor") {
		colors = true;
		mode = "color";
	}
	mode = mode || "color";
	// NOTE(review): assigned without `var` — these look like module-level
	// regexes consumed by clean_html; confirm they are declared elsewhere.
	html_regexp = new RegExp("<[^>]*>", "g");
	newline_regexp = new RegExp("<br[^>]*>", "g");
	var uid = scheduler.uid();
	var d = document.createElement("div");
	d.style.display = "none";
	document.body.appendChild(d);
	d.innerHTML = '<form id="' + uid + '" method="post" target="_blank" action="' + url + '" accept-charset="utf-8" enctype="application/x-www-form-urlencoded"><input type="hidden" name="mycoolxmlbody"/> </form>';
	var xml = "";
	if (start) {
		// Multi-page export: remember the current view so it can be restored.
		var original_date = scheduler._date;
		var original_mode = scheduler._mode;
		xml = xml_top("pages", mode, header, footer);
		for (var temp_date = new Date(start); +temp_date < +end; temp_date = scheduler.date.add(temp_date, 1, view)) {
			scheduler.setCurrentView(temp_date, view);
			// \u2013 (en dash) is replaced since the export backend expects "-".
			xml += xml_start("page") + xml_body_header().replace("\u2013", "-") + xml_body(colors) + xml_end("page");
		}
		xml += xml_end("pages");
		scheduler.setCurrentView(original_date, original_mode);
	} else {
		xml = xml_top("data", mode, header, footer) + xml_body_header().replace("\u2013", "-") + xml_body(colors) + xml_end("data");
	}
	document.getElementById(uid).firstChild.value = encodeURIComponent(xml);
	document.getElementById(uid).submit();
	d.parentNode.removeChild(d); // the form is only needed for the submit
}
// Exports the currently visible scheduler view to PDF by posting its XML
// snapshot to the export service at `url`. `mode`, `header` and `footer`
// are forwarded to the exporter unchanged.
scheduler.toPDF = function(url, mode, header, footer) {
	// No date range: the first three to_pdf arguments stay null.
	return to_pdf.call(this, null, null, null, url, mode, header, footer);
};
// Exports a range of views to PDF: one page per `view` step from `start`
// (inclusive) to `end` (exclusive). `start`/`end` may be Date objects or
// strings parseable by scheduler.templates.api_date.
scheduler.toPDFRange = function(start, end, view, url, mode, header, footer) {
	if (typeof start == "string") {
		start = scheduler.templates.api_date(start);
		end = scheduler.templates.api_date(end);
	}
	// Pass the (possibly converted) values explicitly. The previous
	// `to_pdf.apply(this, arguments)` only forwarded the converted dates
	// because sloppy-mode `arguments` aliases the named parameters — a
	// behavior that silently breaks under "use strict".
	return to_pdf.apply(this, [start, end, view, url, mode, header, footer]);
};
})();
| {'content_hash': '5ca5c78f96ce143d33f57f71b1ee8389', 'timestamp': '', 'source': 'github', 'line_count': 354, 'max_line_length': 211, 'avg_line_length': 35.93785310734463, 'alnum_prop': 0.5646124823141015, 'repo_name': 'OpenLamas/vamk-timetables', 'id': 'ece76f2dca67000829851e29d7ae45f00d83eae6', 'size': '12722', 'binary': False, 'copies': '5', 'ref': 'refs/heads/master', 'path': 'webui/vendor/js/ext/dhtmlxscheduler_pdf.js', 'mode': '33261', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '3207'}, {'name': 'CoffeeScript', 'bytes': '4691'}, {'name': 'JavaScript', 'bytes': '2783'}, {'name': 'Ruby', 'bytes': '24700'}, {'name': 'Scala', 'bytes': '17206'}, {'name': 'Shell', 'bytes': '396'}]} |
<?php
namespace Plugin\GiftWrapping\ServiceProvider;
use Eccube\Application;
use Monolog\Handler\FingersCrossed\ErrorLevelActivationStrategy;
use Monolog\Handler\FingersCrossedHandler;
use Monolog\Handler\RotatingFileHandler;
use Monolog\Logger;
use Plugin\GiftWrapping\Form\Extension\ShoppingTypeExtension;
use Plugin\GiftWrapping\Form\Type\GiftWrappingConfigType;
use Silex\Application as BaseApplication;
use Silex\ServiceProviderInterface;
use Symfony\Component\Yaml\Yaml;
/**
 * Service provider for the GiftWrapping plugin.
 *
 * Registers the plugin's routes, form types and extensions, repository and
 * service factories, translations, merged configuration and a dedicated
 * rotating log channel on the EC-CUBE (Silex) application container.
 */
class GiftWrappingServiceProvider implements ServiceProviderInterface
{
    /**
     * Registers all plugin services on the application container.
     *
     * @param BaseApplication $app the Silex/EC-CUBE application container
     */
    public function register(BaseApplication $app)
    {
        // Admin screen route (config page) and front checkout route.
        $app->match('/' . $app['config']['admin_route'] . '/plugin/giftwrapping/config', 'Plugin\GiftWrapping\Controller\ConfigController::index')->bind('plugin_GiftWrapping_config');
        $app->match('/plugin/giftwrapping/checkout', 'Plugin\GiftWrapping\Controller\GiftWrappingController::index')->bind('plugin_giftwrapping_index');
        // Form type for the plugin's configuration screen.
        $app['form.types'] = $app->share($app->extend('form.types', function ($types) use ($app) {
            $types[] = new GiftWrappingConfigType($app);
            return $types;
        }));
        // Form extension hooked into the shopping form.
        $app['form.type.extensions'] = $app->share($app->extend('form.type.extensions', function ($extensions) use ($app) {
            $extensions[] = new ShoppingTypeExtension($app);
            return $extensions;
        }));
        // Doctrine repository for the Wrapping entity.
        $app['eccube.plugin.repository.wrapping'] = $app->share(function () use ($app) {
            return $app['orm.em']->getRepository('Plugin\GiftWrapping\Entity\Wrapping');
        });
        // Business-logic service facade.
        $app['eccube.plugin.service.gift_wrapping'] = $app->share(function () use ($app) {
            return new \Plugin\GiftWrapping\Service\GiftWrappingService($app);
        });
        // Message registration: load locale-specific YAML translations if present.
        $app['translator'] = $app->share($app->extend('translator', function ($translator, \Silex\Application $app) {
            $translator->addLoader('yaml', new \Symfony\Component\Translation\Loader\YamlFileLoader());
            $file = __DIR__ . '/../Resource/locale/message.' . $app['locale'] . '.yml';
            if (file_exists($file)) {
                $translator->addResource('yaml', $file, $app['locale']);
            }
            return $translator;
        }));
        // load config: merge the plugin's path.yml and constant.yml into the
        // application config (plugin values override on key collisions).
        $conf = $app['config'];
        $app['config'] = $app->share(function () use ($conf) {
            $confarray = array();
            $path_file = __DIR__ . '/../Resource/config/path.yml';
            if (file_exists($path_file)) {
                $config_yml = Yaml::parse(file_get_contents($path_file));
                if (isset($config_yml)) {
                    $confarray = array_replace_recursive($confarray, $config_yml);
                }
            }
            $constant_file = __DIR__ . '/../Resource/config/constant.yml';
            if (file_exists($constant_file)) {
                $config_yml = Yaml::parse(file_get_contents($constant_file));
                if (isset($config_yml)) {
                    $confarray = array_replace_recursive($confarray, $config_yml);
                }
            }
            return array_replace_recursive($conf, $confarray);
        });
        // Log file configuration: daily-rotated plugin log, buffered until an
        // INFO-or-higher record arrives (FingersCrossed strategy).
        $app['monolog.gift.wrapping'] = $app->share(function ($app) {
            $logger = new $app['monolog.logger.class']('gift.wrapping.client');
            $file = $app['config']['root_dir'] . '/app/log/giftwrapping.log';
            $RotateHandler = new RotatingFileHandler($file, $app['config']['log']['max_files'], Logger::INFO);
            $RotateHandler->setFilenameFormat(
                'giftwrapping_{date}',
                'Y-m-d'
            );
            $logger->pushHandler(
                new FingersCrossedHandler(
                    $RotateHandler,
                    new ErrorLevelActivationStrategy(Logger::INFO)
                )
            );
            return $logger;
        });
    }
    /**
     * Boot hook — intentionally empty; everything happens in register().
     *
     * @param BaseApplication $app the application container
     */
    public function boot(BaseApplication $app)
    {
    }
}
| {'content_hash': '13f7207646762f997b05770998e40c2c', 'timestamp': '', 'source': 'github', 'line_count': 107, 'max_line_length': 183, 'avg_line_length': 38.19626168224299, 'alnum_prop': 0.5793980915096648, 'repo_name': 'k-yamamura/eccube3-sample-plugin', 'id': 'ed0d9aa669202c7de25e4170f369d7b30cfceb64', 'size': '4125', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'GiftWrapping/ServiceProvider/GiftWrappingServiceProvider.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '2404'}, {'name': 'PHP', 'bytes': '17285'}]} |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE134_Uncontrolled_Format_String__char_connect_socket_snprintf_74b.cpp
Label Definition File: CWE134_Uncontrolled_Format_String.label.xml
Template File: sources-sinks-74b.tmpl.cpp
*/
/*
* @description
* CWE: 134 Uncontrolled Format String
* BadSource: connect_socket Read data using a connect socket (client side)
* GoodSource: Copy a fixed string into data
* Sinks: snprintf
* GoodSink: snprintf with "%s" as the third argument and data as the fourth
* BadSink : snprintf with data as the third argument
* Flow Variant: 74 Data flow: data passed in a map from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <map>
#ifndef _WIN32
#include <wchar.h>
#endif
#ifdef _WIN32
#define SNPRINTF _snprintf
#else
#define SNPRINTF snprintf
#endif
using namespace std;
/* NOTE: This is a NIST Juliet benchmark testcase. The format-string flaw in
 * badSink()/goodG2BSink() is INTENTIONAL test data for static analyzers and
 * must not be "fixed". */
namespace CWE134_Uncontrolled_Format_String__char_connect_socket_snprintf_74
{
#ifndef OMITBAD
/* Bad sink: receives attacker-influenced data (read from a socket in the
 * companion 74a file) and passes it as the snprintf format argument. */
void badSink(map<int, char *> dataMap)
{
    /* copy data out of dataMap */
    char * data = dataMap[2];
    {
        char dest[100] = "";
        /* POTENTIAL FLAW: Do not specify the format allowing a possible format string vulnerability */
        SNPRINTF(dest, 100-1, data);
        printLine(dest);
    }
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink: the sink is still flawed,
 * but the data it receives is a fixed benign string. */
void goodG2BSink(map<int, char *> dataMap)
{
    char * data = dataMap[2];
    {
        char dest[100] = "";
        /* POTENTIAL FLAW: Do not specify the format allowing a possible format string vulnerability */
        SNPRINTF(dest, 100-1, data);
        printLine(dest);
    }
}
/* goodB2G uses the BadSource with the GoodSink: untrusted data, but passed
 * as a value argument behind an explicit "%s" format. */
void goodB2GSink(map<int, char *> dataMap)
{
    char * data = dataMap[2];
    {
        char dest[100] = "";
        /* FIX: Specify the format disallowing a format string vulnerability */
        SNPRINTF(dest, 100-1, "%s", data);
        printLine(dest);
    }
}
#endif /* OMITGOOD */
} /* close namespace */
| {'content_hash': '89d6d75ae1d07226ba0dfde028dd8cc2', 'timestamp': '', 'source': 'github', 'line_count': 80, 'max_line_length': 106, 'avg_line_length': 26.2875, 'alnum_prop': 0.6485972420351879, 'repo_name': 'JianpingZeng/xcc', 'id': '26b47db0149ff7b702fc0ff401475600807904b4', 'size': '2103', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'xcc/test/juliet/testcases/CWE134_Uncontrolled_Format_String/s01/CWE134_Uncontrolled_Format_String__char_connect_socket_snprintf_74b.cpp', 'mode': '33188', 'license': 'bsd-3-clause', 'language': []} |
(function() {
  // Renders x/y axes for a line chart onto a d3 stage. The x axis sits at
  // the bottom of the stage (shifted down by stage_h); when `type` is
  // "date" its ticks are formatted as "MM/DD".
  var LineAxis = function(stage, stage_h, scale_x, scale_y, type) {
    this.stage = stage;
    this.stage_h = stage_h;
    var bottom = d3.svg.axis().scale(scale_x).orient('bottom');
    if (type === "date") {
      bottom = bottom.tickFormat(d3.time.format("%m/%d"));
    }
    this.axis_x = bottom;
    this.axis_y = d3.svg.axis().scale(scale_y).orient('left');
  };

  // Draws the x axis group, translated to the bottom edge of the stage.
  LineAxis.prototype._render_x = function() {
    var attrs = {
      "class": "x axis",
      "transform": "translate(" + [0, this.stage_h] + ")"
    };
    return this.stage.append("g").attr(attrs).call(this.axis_x);
  };

  // Draws the y axis group at the stage origin.
  LineAxis.prototype._render_y = function() {
    return this.stage.append("g").attr({ "class": "y axis" }).call(this.axis_y);
  };

  // Draws both axes; returns the y-axis selection.
  LineAxis.prototype.render = function() {
    this._render_x();
    return this._render_y();
  };

  // Export on the caller's `this` (the global object at top level).
  this.LineAxis = LineAxis;
}).call(this);
| {'content_hash': '63d8130686d37a724d1b8535a3664c8a', 'timestamp': '', 'source': 'github', 'line_count': 39, 'max_line_length': 101, 'avg_line_length': 26.897435897435898, 'alnum_prop': 0.5281220209723546, 'repo_name': 'kouyaf77/d3-classes', 'id': '8329b82d0d0c833b1bc6284ce7e3e8f8016cb6d0', 'size': '1049', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/js/line_axis.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '458'}, {'name': 'CoffeeScript', 'bytes': '3644'}, {'name': 'JavaScript', 'bytes': '1049'}]} |
package com.baidu.bjf.remoting.protobuf.simplerepeat;
import java.io.IOException;
import java.util.HashSet;
import org.junit.Assert;
import org.junit.Test;
import com.baidu.bjf.remoting.protobuf.Codec;
import com.baidu.bjf.remoting.protobuf.ProtobufProxy;
import com.baidu.bjf.remoting.protobuf.complex.PersonPOJO;
/**
* The Class StringSetTest.
*
* @author xiemalin
* @since 3.4.0
*/
public class StringSetTest {
    /**
     * Test string set POJO.
     *
     * Round-trips a POJO whose only populated field is a {@code Set<String>}
     * through the generated protobuf codec and checks the element count
     * survives encode/decode.
     */
    @Test
    public void testStringSetPOJO() {
        // false => do not print/debug the generated codec source.
        Codec<StringSetDojoClass> codec = ProtobufProxy.create(StringSetDojoClass.class, false);
        StringSetDojoClass stringSet = new StringSetDojoClass();
        stringSet.stringSet = new HashSet<String>();
        stringSet.stringSet.add("hello");
        stringSet.stringSet.add("world");
        stringSet.stringSet.add("xiemalin");
        try {
            byte[] bs = codec.encode(stringSet);
            StringSetDojoClass stringSetDojoClass = codec.decode(bs);
            Assert.assertEquals(3, stringSetDojoClass.stringSet.size());
        } catch (IOException e) {
            // Encode/decode failure is a test failure, not an error.
            Assert.fail(e.getMessage());
        }
    }
    /**
     * Test string set POJO 2.
     *
     * Same round-trip as above but with a second populated set of complex
     * (message-typed) elements, verifying both sets and the nested field
     * values survive encode/decode.
     */
    @Test
    public void testStringSetPOJO2() {
        Codec<StringSetDojoClass> codec = ProtobufProxy.create(StringSetDojoClass.class, false);
        StringSetDojoClass stringSet = new StringSetDojoClass();
        stringSet.personSet = new HashSet<PersonPOJO>();
        stringSet.stringSet = new HashSet<String>();
        PersonPOJO pojo = new PersonPOJO();
        pojo.name = "hello";
        pojo.id = 100;
        stringSet.personSet.add(pojo);
        stringSet.stringSet.add("hello");
        stringSet.stringSet.add("world");
        stringSet.stringSet.add("xiemalin");
        try {
            byte[] bs = codec.encode(stringSet);
            StringSetDojoClass stringSetDojoClass = codec.decode(bs);
            Assert.assertEquals(3, stringSetDojoClass.stringSet.size());
            Assert.assertEquals(1, stringSetDojoClass.personSet.size());
            Assert.assertEquals("hello", stringSetDojoClass.personSet.iterator().next().name);
        } catch (IOException e) {
            Assert.fail(e.getMessage());
        }
    }
}
| {'content_hash': '08dd5c7d584bc21f5feeac245c6f7da3', 'timestamp': '', 'source': 'github', 'line_count': 80, 'max_line_length': 96, 'avg_line_length': 30.65, 'alnum_prop': 0.5966557911908646, 'repo_name': 'jhunters/jprotobuf', 'id': '3fb581102cea6035edee28cbb820b264e8a13746', 'size': '3324', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/test/java/com/baidu/bjf/remoting/protobuf/simplerepeat/StringSetTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '2074579'}, {'name': 'Smarty', 'bytes': '3474'}]} |
#pragma once
#include <aws/firehose/Firehose_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
namespace Aws
{
namespace Firehose
{
namespace Model
{
  // Content encodings Firehose can apply to the request body.
  // NOT_SET is the sentinel used before a value has been assigned.
  enum class ContentEncoding
  {
    NOT_SET,
    NONE,
    GZIP
  };
// Conversion helpers between ContentEncoding values and their wire names.
namespace ContentEncodingMapper
{
// Parses a service string (e.g. "GZIP") into the enum; presumably returns
// NOT_SET for unknown names — TODO confirm against the .cpp implementation.
AWS_FIREHOSE_API ContentEncoding GetContentEncodingForName(const Aws::String& name);
// Serializes the enum back to its service string representation.
AWS_FIREHOSE_API Aws::String GetNameForContentEncoding(ContentEncoding value);
} // namespace ContentEncodingMapper
} // namespace Model
} // namespace Firehose
} // namespace Aws
| {'content_hash': '2f52e79f63ac22fd74d7a1e3db1f3589', 'timestamp': '', 'source': 'github', 'line_count': 28, 'max_line_length': 84, 'avg_line_length': 19.107142857142858, 'alnum_prop': 0.7588785046728972, 'repo_name': 'jt70471/aws-sdk-cpp', 'id': '6722b953d54198e52d41b43ac0e00c4e4ced8e64', 'size': '654', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'aws-cpp-sdk-firehose/include/aws/firehose/model/ContentEncoding.h', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '13452'}, {'name': 'C++', 'bytes': '278594037'}, {'name': 'CMake', 'bytes': '653931'}, {'name': 'Dockerfile', 'bytes': '5555'}, {'name': 'HTML', 'bytes': '4471'}, {'name': 'Java', 'bytes': '302182'}, {'name': 'Python', 'bytes': '110380'}, {'name': 'Shell', 'bytes': '4674'}]} |
package gov.nih.nci.caintegrator.application.study.deployment;
import gov.nih.nci.caintegrator.application.arraydata.ArrayDataService;
import gov.nih.nci.caintegrator.application.study.GenomicDataSourceConfiguration;
import gov.nih.nci.caintegrator.data.CaIntegrator2Dao;
import gov.nih.nci.caintegrator.external.DataRetrievalException;
import gov.nih.nci.caintegrator.external.caarray.CaArrayFacade;
/**
* Creates instances of dna analysis handlers.
*/
public interface DnaAnalysisHandlerFactory {
    /**
     * Creates a handler instance for processing unparsed supplemental
     * mapping files from the given genomic data source.
     *
     * @param genomicSource the genomic data source
     * @param caArrayFacade the interface to caArray
     * @param arrayDataService the array data storage service
     * @param dao the data access interface
     * @return the handler appropriate for the source's platform vendor.
     * @exception DataRetrievalException for invalid platform vendor.
     */
    AbstractUnparsedSupplementalMappingFileHandler getHandler(GenomicDataSourceConfiguration genomicSource,
            CaArrayFacade caArrayFacade,
            ArrayDataService arrayDataService,
            CaIntegrator2Dao dao) throws DataRetrievalException;
}
| {'content_hash': '887fd8f2684e68418390f84a2c0612fb', 'timestamp': '', 'source': 'github', 'line_count': 30, 'max_line_length': 108, 'avg_line_length': 37.96666666666667, 'alnum_prop': 0.7708516242317822, 'repo_name': 'NCIP/caintegrator', 'id': '4fe2c676e2fd972047907f6aa931440b60c66d35', 'size': '1330', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'caintegrator-war/src/gov/nih/nci/caintegrator/application/study/deployment/DnaAnalysisHandlerFactory.java', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'CSS', 'bytes': '61091'}, {'name': 'FreeMarker', 'bytes': '30688'}, {'name': 'HTML', 'bytes': '828'}, {'name': 'Java', 'bytes': '5239823'}, {'name': 'JavaScript', 'bytes': '163834'}, {'name': 'PLSQL', 'bytes': '55084'}, {'name': 'Perl', 'bytes': '2710'}, {'name': 'Shell', 'bytes': '3376'}, {'name': 'TeX', 'bytes': '90'}, {'name': 'XSLT', 'bytes': '157133'}]} |
//
// PPPdf417RecognizerResult.h
// Pdf417Framework
//
// Created by Jura on 11/07/15.
// Copyright (c) 2015 MicroBlink Ltd. All rights reserved.
//
#import "PPRecognizerResult.h"
@class PPBarcodeDetailedData;
/**
* Result of scanning with PDF417 Recognizer
*
* Contains raw Barcode detailed data, and methods for getting string representation of results.
*/
@interface PPPdf417RecognizerResult : PPRecognizerResult
/**
 * Byte array with result of the scan
 */
- (NSData *)data;
/**
 * Retrieves string content of the scanned data using guessed encoding.
 *
 * If you're 100% sure you know the exact encoding in the barcode, use the
 * stringUsingEncoding: method instead. Otherwise prefer this method.
 *
 * This method uses NSString stringEncodingForData:encodingOptions:convertedString:usedLossyConversion: method for
 * guessing the encoding.
 *
 * @return created string, or nil if encoding couldn't be found.
 */
- (NSString*)stringUsingGuessedEncoding;
/**
 * Retrieves string content of the scanned data using given encoding.
 *
 * @param encoding The encoding for the returned string.
 *
 * @return String created from data property, using given encoding
 */
- (NSString*)stringUsingEncoding:(NSStringEncoding)encoding;
/**
 * Raw barcode detailed result
 */
- (PPBarcodeDetailedData *)rawData;
/**
 * Flag indicating uncertain scanning data
 * E.g obtained from damaged barcode.
 */
- (BOOL)isUncertain;
/**
 * Returns the location of the barcode on the original image. The location is a quadrangle, which is defined
 * with upper left, upper right, lower left and lower right corner.
 *
 * Points are given in image coordinate system
 * (0, 0) - top left point on the image, (width, height)
 * bottom right point on the image
 *
 * @return location of the barcode on the original image.
 *
 * @note - to get the points in NSArray use PPQuadrangle's toPointsArray method.
 */
- (PPQuadrangle *)locationOnImage;
@end
| {'content_hash': '6baecc63c5ea1d8dee6c389e7168f268', 'timestamp': '', 'source': 'github', 'line_count': 72, 'max_line_length': 115, 'avg_line_length': 27.055555555555557, 'alnum_prop': 0.7376796714579056, 'repo_name': 'qwang216/unit-2-hw-2', 'id': '6929bfe31d0a9d1af742c0a599b11a5f3e5287e2', 'size': '1948', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'hw-week-2/Blink/Pods/PPBlinkOCR/MicroBlink.embeddedframework/MicroBlink.framework/Versions/A/Headers/PPPdf417RecognizerResult.h', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Objective-C', 'bytes': '103329'}, {'name': 'Ruby', 'bytes': '53'}, {'name': 'Shell', 'bytes': '10675'}]} |
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://ws.apache.org/ns/synapse">
<registry provider="org.wso2.carbon.mediation.registry.WSO2Registry">
<parameter name="cachableDuration">15000</parameter>
</registry>
<taskManager provider="org.wso2.carbon.mediation.ntask.NTaskTaskManager"/>
<proxy name="MyProxy" startOnLoad="true" trace="disable" transports="http https">
<description/>
<target>
<endpoint>
<address uri="http://172.17.0.1:9763/services/HelloService"/>
</endpoint>
<inSequence>
<log/>
</inSequence>
<outSequence>
<send/>
</outSequence>
</target>
</proxy>
<sequence name="fault">
<!-- Log the message at the full log level with the ERROR_MESSAGE and the ERROR_CODE-->
<log level="full">
<property name="MESSAGE" value="Executing default 'fault' sequence"/>
<property expression="get-property('ERROR_CODE')" name="ERROR_CODE"/>
<property expression="get-property('ERROR_MESSAGE')" name="ERROR_MESSAGE"/>
</log>
<!-- Drops the messages by default if there is a fault -->
<drop/>
</sequence>
<sequence name="main">
<in>
<!-- Log all messages passing through -->
<log level="full"/>
<!-- ensure that the default configuration only sends if it is one of samples -->
<!-- Otherwise Synapse would be an open proxy by default (BAD!) -->
<filter regex="http://localhost:9000.*" source="get-property('To')">
<!-- Send the messages where they have been sent (i.e. implicit "To" EPR) -->
<send/>
</filter>
</in>
<out>
<send/>
</out>
<description>The main sequence for the message mediation</description>
</sequence>
<!-- You can add any flat sequences, endpoints, etc.. to this synapse.xml file if you do
*not* want to keep the artifacts in several files -->
</definitions> | {'content_hash': '29eb2c735224bc675d6056f30f8b50ba', 'timestamp': '', 'source': 'github', 'line_count': 49, 'max_line_length': 95, 'avg_line_length': 43.48979591836735, 'alnum_prop': 0.5673392773345847, 'repo_name': 'dimuthud/product-as', 'id': '5d81746925404c315b157b17ac3225fba09f2ff2', 'size': '2131', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'modules/distributed/tests.distributed/src/test/resources/artifacts/common/proxy/synapse.xml', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '49211'}, {'name': 'CSS', 'bytes': '193561'}, {'name': 'HTML', 'bytes': '494024'}, {'name': 'Java', 'bytes': '2646670'}, {'name': 'JavaScript', 'bytes': '491257'}, {'name': 'SQLPL', 'bytes': '509819'}, {'name': 'Shell', 'bytes': '46424'}, {'name': 'XSLT', 'bytes': '36050'}]} |
from mock import patch
from oslo.config import cfg
from neutron.api import extensions as neutron_extensions
from neutron.api.v2 import attributes
from neutron import context
import neutron.db.api as db
from neutron.extensions import portbindings
from neutron.plugins.cisco.db import n1kv_db_v2
from neutron.plugins.cisco.db import network_db_v2 as cdb
from neutron.plugins.cisco import extensions
from neutron.plugins.cisco.extensions import n1kv
from neutron.plugins.cisco.extensions import network_profile
from neutron.plugins.cisco.n1kv import n1kv_client
from neutron.plugins.cisco.n1kv import n1kv_neutron_plugin
from neutron.tests.unit import _test_extension_portbindings as test_bindings
from neutron.tests.unit import test_api_v2
from neutron.tests.unit import test_db_plugin as test_plugin
class FakeResponse(object):
    """Stand-in for an httplib response, served by the mocked connection.

    Construct it with the status code, the body text to serve, and a
    content type. Note that the content type argument is accepted but not
    stored: header lookups always report "application/xml".
    """
    def __init__(self, status, response_text, content_type):
        # Only status and body are retained; content_type is ignored.
        self.status = status
        self.buffer = response_text
    def __getitem__(self, key):
        # Every header lookup reports an XML payload.
        return "application/xml"
    def read(self, *args, **kwargs):
        # File-like read(); any arguments are accepted and ignored.
        return self.buffer
def _fake_setup_vsm(self):
    """Fake establish Communication with Cisco Nexus1000V VSM.

    Monkey-patched in as N1kvNeutronPluginV2._setup_vsm during test setUp()
    so the plugin believes a VSM agent is connected and immediately polls
    the (mocked) port-profile policies.
    """
    self.agent_vsm = True
    self._poll_policies(event_type="port_profile")
class NetworkProfileTestExtensionManager(object):
    """Minimal extension manager exposing only the network_profile resources."""
    def get_resources(self):
        """Return the network_profile extension resources.

        Also registers the extension's attributes in the global map.
        """
        # Add the resources to the global attribute map
        # This is done here as the setup process won't
        # initialize the main API router which extends
        # the global attribute map
        attributes.RESOURCE_ATTRIBUTE_MAP.update(
            network_profile.RESOURCE_ATTRIBUTE_MAP)
        return network_profile.Network_profile.get_resources()
    def get_actions(self):
        # No extra actions are provided by this test manager.
        return []
    def get_request_extensions(self):
        # No request extensions are provided by this test manager.
        return []
class N1kvPluginTestCase(test_plugin.NeutronDbPluginV2TestCase):
    """Base test case for the Cisco N1KV plugin with a fully mocked VSM."""
    # Dotted path of the plugin under test.
    _plugin_name = ('neutron.plugins.cisco.n1kv.'
                    'n1kv_neutron_plugin.N1kvNeutronPluginV2')
    tenant_id = "some_tenant"
    # Canned VSM HTTP response; subclasses may override BEFORE setUp() runs.
    DEFAULT_RESP_BODY = ""
    DEFAULT_RESP_CODE = 200
    DEFAULT_CONTENT_TYPE = ""
    fmt = "json"
    def _make_test_policy_profile(self, name='service_profile'):
        """
        Create a policy profile record for testing purpose.
        :param name: string representing the name of the policy profile to
                     create. Default argument value chosen to correspond to the
                     default name specified in config.py file.
        """
        uuid = test_api_v2._uuid()
        profile = {'id': uuid,
                   'name': name}
        return n1kv_db_v2.create_policy_profile(profile)
    def _make_test_profile(self, name='default_network_profile'):
        """
        Create a profile record for testing purposes.
        :param name: string representing the name of the network profile to
                     create. Default argument value chosen to correspond to the
                     default name specified in config.py file.
        """
        db_session = db.get_session()
        profile = {'name': name,
                   'segment_type': 'vlan',
                   'physical_network': 'phsy1',
                   'segment_range': '3968-4047'}
        # Pre-populate the VLAN allocation table for the profile's range.
        self.network_vlan_ranges = {profile[
            'physical_network']: [(3968, 4047)]}
        n1kv_db_v2.sync_vlan_allocations(db_session, self.network_vlan_ranges)
        return n1kv_db_v2.create_network_profile(db_session, profile)
    def setUp(self):
        """
        Setup method for n1kv plugin tests.
        First step is to define an acceptable response from the VSM to
        our requests. This needs to be done BEFORE the setUp() function
        of the super-class is called.
        This default here works for many cases. If you need something
        extra, please define your own setUp() function in your test class,
        and set your DEFAULT_RESPONSE value also BEFORE calling the
        setUp() of the super-function (this one here). If you have set
        a value already, it will not be overwritten by this code.
        """
        if not self.DEFAULT_RESP_BODY:
            self.DEFAULT_RESP_BODY = (
                """<?xml version="1.0" encoding="utf-8"?>
                <set name="events_set">
                <instance name="1" url="/api/hyper-v/events/1">
                <properties>
                <cmd>configure terminal ; port-profile type vethernet grizzlyPP
                (SUCCESS)
                </cmd>
                <id>42227269-e348-72ed-bdb7-7ce91cd1423c</id>
                <time>1369223611</time>
                <name>grizzlyPP</name>
                </properties>
                </instance>
                <instance name="2" url="/api/hyper-v/events/2">
                <properties>
                <cmd>configure terminal ; port-profile type vethernet havanaPP
                (SUCCESS)
                </cmd>
                <id>3fc83608-ae36-70e7-9d22-dec745623d06</id>
                <time>1369223661</time>
                <name>havanaPP</name>
                </properties>
                </instance>
                </set>
                """)
        # Creating a mock HTTP connection object for httplib. The N1KV client
        # interacts with the VSM via HTTP. Since we don't have a VSM running
        # in the unit tests, we need to 'fake' it by patching the HTTP library
        # itself. We install a patch for a fake HTTP connection class.
        # Using __name__ to avoid having to enter the full module path.
        http_patcher = patch(n1kv_client.httplib2.__name__ + ".Http")
        FakeHttpConnection = http_patcher.start()
        self.addCleanup(http_patcher.stop)
        # Now define the return values for a few functions that may be called
        # on any instance of the fake HTTP connection class.
        instance = FakeHttpConnection.return_value
        instance.getresponse.return_value = (FakeResponse(
            self.DEFAULT_RESP_CODE,
            self.DEFAULT_RESP_BODY,
            'application/xml'))
        instance.request.return_value = (instance.getresponse.return_value,
                                         self.DEFAULT_RESP_BODY)
        # Patch some internal functions in a few other parts of the system.
        # These help us move along, without having to mock up even more systems
        # in the background.
        # Return a dummy VSM IP address
        get_vsm_hosts_patcher = patch(n1kv_client.__name__ +
                                      ".Client._get_vsm_hosts")
        fake_get_vsm_hosts = get_vsm_hosts_patcher.start()
        self.addCleanup(get_vsm_hosts_patcher.stop)
        fake_get_vsm_hosts.return_value = ["127.0.0.1"]
        # Return dummy user profiles
        get_cred_name_patcher = patch(cdb.__name__ + ".get_credential_name")
        fake_get_cred_name = get_cred_name_patcher.start()
        self.addCleanup(get_cred_name_patcher.stop)
        fake_get_cred_name.return_value = {"user_name": "admin",
                                           "password": "admin_password"}
        # NOTE(review): module-level monkey-patch with no cleanup — the fake
        # _setup_vsm stays in place for the rest of the process.
        n1kv_neutron_plugin.N1kvNeutronPluginV2._setup_vsm = _fake_setup_vsm
        neutron_extensions.append_api_extensions_path(extensions.__path__)
        self.addCleanup(cfg.CONF.reset)
        ext_mgr = NetworkProfileTestExtensionManager()
        # Save the original RESOURCE_ATTRIBUTE_MAP
        self.saved_attr_map = {}
        for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.items():
            self.saved_attr_map[resource] = attrs.copy()
        # Update the RESOURCE_ATTRIBUTE_MAP with n1kv specific extended attrs.
        attributes.RESOURCE_ATTRIBUTE_MAP["networks"].update(
            n1kv.EXTENDED_ATTRIBUTES_2_0["networks"])
        attributes.RESOURCE_ATTRIBUTE_MAP["ports"].update(
            n1kv.EXTENDED_ATTRIBUTES_2_0["ports"])
        self.addCleanup(self.restore_resource_attribute_map)
        self.addCleanup(db.clear_db)
        super(N1kvPluginTestCase, self).setUp(self._plugin_name,
                                              ext_mgr=ext_mgr)
        # Create some of the database entries that we require.
        self._make_test_profile()
        self._make_test_policy_profile()
    def restore_resource_attribute_map(self):
        # Restore the original RESOURCE_ATTRIBUTE_MAP
        attributes.RESOURCE_ATTRIBUTE_MAP = self.saved_attr_map
    def test_plugin(self):
        """Smoke test: create a network and list it scoped to the tenant."""
        self._make_network('json',
                           'some_net',
                           True,
                           tenant_id=self.tenant_id,
                           set_context=True)
        req = self.new_list_request('networks', params="fields=tenant_id")
        req.environ['neutron.context'] = context.Context('', self.tenant_id)
        res = req.get_response(self.api)
        self.assertEqual(res.status_int, 200)
        body = self.deserialize('json', res)
        self.assertIn('tenant_id', body['networks'][0])
class TestN1kvNetworkProfiles(N1kvPluginTestCase):
    """Tests for the network_profile extension API (create/update rules)."""
    def _prepare_net_profile_data(self, segment_type):
        """Build a request body for a network profile of the given segment type.

        Supported types here: 'vlan' (needs physical_network + segment_range)
        and 'overlay' (needs sub_type + segment_range).
        """
        netp = {'network_profile': {'name': 'netp1',
                                    'segment_type': segment_type,
                                    'tenant_id': self.tenant_id}}
        if segment_type == 'vlan':
            netp['network_profile']['segment_range'] = '100-180'
            netp['network_profile']['physical_network'] = 'phys1'
        elif segment_type == 'overlay':
            netp['network_profile']['segment_range'] = '10000-10010'
            netp['network_profile']['sub_type'] = 'enhanced'
        return netp
    def test_create_network_profile_plugin(self):
        """Creating a vlan network profile should succeed (201)."""
        data = self._prepare_net_profile_data('vlan')
        net_p_req = self.new_create_request('network_profiles', data)
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 201)
    def test_update_network_profile_physical_network_fail(self):
        """physical_network is immutable — update must be rejected (400)."""
        net_p = self._make_test_profile(name='netp1')
        data = {'network_profile': {'physical_network': 'some-phys-net'}}
        net_p_req = self.new_update_request('network_profiles',
                                            data,
                                            net_p['id'])
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)
    def test_update_network_profile_segment_type_fail(self):
        """segment_type is immutable — update must be rejected (400)."""
        net_p = self._make_test_profile(name='netp1')
        data = {'network_profile': {'segment_type': 'overlay'}}
        net_p_req = self.new_update_request('network_profiles',
                                            data,
                                            net_p['id'])
        res = net_p_req.get_response(self.ext_api)
        self.assertEqual(res.status_int, 400)
    def test_update_network_profile_sub_type_fail(self):
        """sub_type of an overlay profile is immutable — update rejected (400)."""
        net_p_dict = self._prepare_net_profile_data('overlay')
        net_p_req = self.new_create_request('network_profiles', net_p_dict)
        net_p = self.deserialize(self.fmt,
                                 net_p_req.get_response(self.ext_api))
        data = {'network_profile': {'sub_type': 'vlan'}}
        update_req = self.new_update_request('network_profiles',
                                             data,
                                             net_p['network_profile']['id'])
        update_res = update_req.get_response(self.ext_api)
        self.assertEqual(update_res.status_int, 400)
class TestN1kvBasicGet(test_plugin.TestBasicGet,
                       N1kvPluginTestCase):
    """Run the standard basic-GET tests against the N1kv plugin."""
    pass
class TestN1kvHTTPResponse(test_plugin.TestV2HTTPResponse,
                           N1kvPluginTestCase):
    """Run the standard v2 HTTP-response tests against the N1kv plugin."""
    pass
class TestN1kvPorts(test_plugin.TestPortsV2,
                    N1kvPluginTestCase,
                    test_bindings.PortBindingsTestCase):
    """Port CRUD tests plus N1kv policy-profile specific cases."""

    VIF_TYPE = portbindings.VIF_TYPE_OVS
    HAS_PORT_FILTER = False

    def test_create_port_with_default_n1kv_policy_profile_id(self):
        """Test port create without passing policy profile id."""
        with self.port() as port:
            # The plugin should have attached the default policy profile.
            db_session = db.get_session()
            pp = n1kv_db_v2.get_policy_profile(
                db_session, port['port'][n1kv.PROFILE_ID])
            self.assertEqual(pp['name'], 'service_profile')

    def test_create_port_with_n1kv_policy_profile_id(self):
        """Test port create with policy profile id."""
        profile_obj = self._make_test_policy_profile(name='test_profile')
        with self.network() as network:
            data = {'port': {n1kv.PROFILE_ID: profile_obj.id,
                             'tenant_id': self.tenant_id,
                             'network_id': network['network']['id']}}
            port_req = self.new_create_request('ports', data)
            port = self.deserialize(self.fmt,
                                    port_req.get_response(self.api))
            # The explicitly supplied profile must be echoed back.
            self.assertEqual(port['port'][n1kv.PROFILE_ID],
                             profile_obj.id)
            self._delete('ports', port['port']['id'])

    def test_update_port_with_n1kv_policy_profile_id(self):
        """Test port update failure while updating policy profile id."""
        with self.port() as port:
            data = {'port': {n1kv.PROFILE_ID: 'some-profile-uuid'}}
            port_req = self.new_update_request('ports',
                                               data,
                                               port['port']['id'])
            res = port_req.get_response(self.api)
            # Port update should fail to update policy profile id.
            self.assertEqual(res.status_int, 400)
class TestN1kvNetworks(test_plugin.TestNetworksV2,
                       N1kvPluginTestCase):
    """Network CRUD tests plus N1kv network-profile specific cases."""

    def _prepare_net_data(self, net_profile_id):
        """Build a network request body carrying an explicit profile id."""
        return {'network': {'name': 'net1',
                            n1kv.PROFILE_ID: net_profile_id,
                            'tenant_id': self.tenant_id}}

    def test_create_network_with_default_n1kv_network_profile_id(self):
        """Test network create without passing network profile id."""
        with self.network() as network:
            # The plugin should have attached the default network profile.
            db_session = db.get_session()
            np = n1kv_db_v2.get_network_profile(
                db_session, network['network'][n1kv.PROFILE_ID])
            self.assertEqual(np['name'], 'default_network_profile')

    def test_create_network_with_n1kv_network_profile_id(self):
        """Test network create with network profile id."""
        profile_obj = self._make_test_profile(name='test_profile')
        data = self._prepare_net_data(profile_obj.id)
        network_req = self.new_create_request('networks', data)
        network = self.deserialize(self.fmt,
                                   network_req.get_response(self.api))
        # The explicitly supplied profile must be echoed back.
        self.assertEqual(network['network'][n1kv.PROFILE_ID],
                         profile_obj.id)

    def test_update_network_with_n1kv_network_profile_id(self):
        """Test network update failure while updating network profile id."""
        with self.network() as network:
            data = {'network': {n1kv.PROFILE_ID: 'some-profile-uuid'}}
            network_req = self.new_update_request('networks',
                                                  data,
                                                  network['network']['id'])
            res = network_req.get_response(self.api)
            # Network update should fail to update network profile id.
            self.assertEqual(res.status_int, 400)
class TestN1kvSubnets(test_plugin.TestSubnetsV2,
                      N1kvPluginTestCase):
    """Run the standard subnet v2 tests against the N1kv plugin."""
    pass
| {'content_hash': 'ba401b845a22bb952bb974c24d4f812c', 'timestamp': '', 'source': 'github', 'line_count': 370, 'max_line_length': 79, 'avg_line_length': 42.87837837837838, 'alnum_prop': 0.5859439016703435, 'repo_name': 'beagles/neutron_hacking', 'id': '909df25905a23c22ba48de36538cb6a38bd6dc19', 'size': '16616', 'binary': False, 'copies': '4', 'ref': 'refs/heads/neutron_oslo_messaging', 'path': 'neutron/tests/unit/cisco/n1kv/test_n1kv_plugin.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '37307'}, {'name': 'JavaScript', 'bytes': '67930'}, {'name': 'Python', 'bytes': '8125263'}, {'name': 'Shell', 'bytes': '8920'}, {'name': 'XSLT', 'bytes': '50907'}]} |
using Xamarin.Forms;

namespace XamJam.Nav
{
    /// <summary>
    /// A node in the navigation hierarchy: exposes its kind, its parent
    /// scheme, the page it shows, and whether it is currently displayed.
    /// </summary>
    public interface INavScheme
    {
        /// <summary>The kind of navigation scheme this node represents.</summary>
        SchemeType SchemeType { get; }

        /// <summary>Parent scheme in the hierarchy; presumably null at the root — confirm.</summary>
        INavScheme Parent { get; }

        /// <summary>The Xamarin.Forms page backing this scheme.</summary>
        Page CurrentPage { get; }

        /// <summary>Whether this scheme's page is currently being displayed.</summary>
        bool IsDisplayed { get; set; }
    }
}
} | {'content_hash': 'b2cbeb94a81ccdf3211cf7ce9a6884f8', 'timestamp': '', 'source': 'github', 'line_count': 15, 'max_line_length': 38, 'avg_line_length': 16.066666666666666, 'alnum_prop': 0.5850622406639004, 'repo_name': 'jasonCodesAway/XamJam', 'id': '67c1fd7a446362cd97f1f6b4415a0f182ed0382f', 'size': '243', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'XamJam.Nav/INavScheme.cs', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '2803'}, {'name': 'C#', 'bytes': '150380'}]} |
<demo-template [description]="description" [demoSources]="demoSources">
    <!-- Demo 1: single-select cascade embedded in a combo-select dropdown. -->
    <div class="demo-showcase">
        <span>单选</span>
        <j-combo-select [value]="comboValue1">
            <ng-template>
                <j-cascade
                    width="418"
                    labelField="name"
                    [(selectedItems)]="selectedItems1"
                    [dataGenerator]="generator"
                    [generatorContext]="this"
                    style="min-height: 335px"
                >
                </j-cascade>
            </ng-template>
        </j-combo-select>
    </div>
    <!-- Demo 2: same cascade, but with multipleSelect enabled. -->
    <div class="demo-showcase">
        <span>多选</span>
        <j-combo-select [value]="comboValue2">
            <ng-template>
                <j-cascade
                    width="418"
                    labelField="name"
                    [(selectedItems)]="selectedItems2"
                    [dataGenerator]="generator"
                    [generatorContext]="this"
                    [multipleSelect]="true"
                    style="min-height: 335px"
                >
                </j-cascade>
            </ng-template>
        </j-combo-select>
    </div>
</demo-template>
| {'content_hash': 'dbaef951e975c37be4882a7cdce90e0d', 'timestamp': '', 'source': 'github', 'line_count': 36, 'max_line_length': 71, 'avg_line_length': 33.27777777777778, 'alnum_prop': 0.4373956594323873, 'repo_name': 'rdkmaster/jigsaw', 'id': '5404ab46a916f7b849e2b3becf687992696eaed1', 'size': '1206', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/app/for-external/demo/cascade/with-combo/demo.component.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'AutoIt', 'bytes': '7636'}, {'name': 'CSS', 'bytes': '3231221'}, {'name': 'HTML', 'bytes': '10423519'}, {'name': 'JavaScript', 'bytes': '1132519'}, {'name': 'SCSS', 'bytes': '535631'}, {'name': 'Shell', 'bytes': '12910'}, {'name': 'TypeScript', 'bytes': '4455145'}]} |
package com.fitpay.android.webview.impl;

/**
 * Simple value object carrying the fields of a webview app callback:
 * the command name, a status, an optional reason and a timestamp.
 *
 * @deprecated as of v1.0.3 - not being used
 * Created by Ross on 5/27/2016.
 */
@Deprecated
public class AppCallbackModel {

    // Name of the command this callback refers to.
    private String command;
    // Outcome status of the command.
    private String status;
    // Human-readable reason accompanying the status.
    private String reason;
    // When the callback was produced (string form; format not enforced here).
    private String timestamp;

    public String getCommand() {
        return command;
    }

    public void setCommand(String command) {
        this.command = command;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getReason() {
        return reason;
    }

    public void setReason(String reason) {
        this.reason = reason;
    }

    public String getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(String timestamp) {
        this.timestamp = timestamp;
    }
}
| {'content_hash': 'e1d5ffa73ad6dca4bf9c81a9b7ede500', 'timestamp': '', 'source': 'github', 'line_count': 46, 'max_line_length': 48, 'avg_line_length': 19.108695652173914, 'alnum_prop': 0.6222980659840728, 'repo_name': 'fitpay/fitpay-android-sdk', 'id': 'a3c1fb881bb7075ab8abe14fa8e7d8aeff3b948f', 'size': '879', 'binary': False, 'copies': '1', 'ref': 'refs/heads/develop', 'path': 'fitpay/src/main/java/com/fitpay/android/webview/impl/AppCallbackModel.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '12447'}, {'name': 'Java', 'bytes': '929850'}, {'name': 'Kotlin', 'bytes': '3211'}, {'name': 'Shell', 'bytes': '985'}]} |
package com.allanbank.mongodb.util;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import javax.annotation.concurrent.ThreadSafe;

import com.allanbank.mongodb.MongoDbException;

/**
 * FutureUtils provides helper methods for dealing with {@link Future}s.
 *
 * @api.no This class is <b>NOT</b> part of the drivers API. This class may be
 *         mutated in incompatible ways between any two releases of the driver.
 * @copyright 2012-2013, Allanbank Consulting, Inc., All Rights Reserved
 */
@ThreadSafe
public final class FutureUtils {

    /**
     * Unwraps the contents of the Future.
     *
     * @param <T>
     *            The type of the future and response.
     * @param future
     *            The future value to get.
     * @return The response from the Future.
     * @throws MongoDbException
     *             On an error from the Future. If the waiting thread is
     *             interrupted, the interrupted status is re-asserted before
     *             the exception is thrown.
     */
    public static <T> T unwrap(final Future<T> future) {
        try {
            return future.get();
        }
        catch (final InterruptedException e) {
            // Fix: restore the interrupted status so callers further up the
            // stack can still observe the interruption (previously lost).
            Thread.currentThread().interrupt();
            e.fillInStackTrace();
            throw new MongoDbException(e);
        }
        catch (final ExecutionException e) {
            // Re-throw the underlying cause, unwrapping MongoDbException so
            // callers see the original exception type.
            final Throwable cause = e.getCause();
            cause.fillInStackTrace();
            if (cause instanceof MongoDbException) {
                throw (MongoDbException) cause;
            }
            throw new MongoDbException(cause);
        }
    }

    /**
     * Stop creation of a new FutureUtils.
     */
    private FutureUtils() {
        // Nothing.
    }
}
<!DOCTYPE html>
<!--
@license
Copyright (C) 2016 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
<title>keyboard-shortcut-behavior</title>
<script src="../../bower_components/webcomponentsjs/webcomponents.min.js"></script>
<script src="../../bower_components/web-component-tester/browser.js"></script>
<link rel="import" href="../../test/common-test-setup.html"/>
<link rel="import" href="keyboard-shortcut-behavior.html">
<test-fixture id="basic">
<template>
<test-element></test-element>
</template>
</test-fixture>
<test-fixture id="within-overlay">
<template>
<gr-overlay>
<test-element></test-element>
</gr-overlay>
</template>
</test-fixture>
<script>
suite('keyboard-shortcut-behavior tests', () => {
const kb = window.Gerrit.KeyboardShortcutBinder;
let element;
let overlay;
let sandbox;
suiteSetup(() => {
// Define a Polymer element that uses this behavior.
Polymer({
is: 'test-element',
behaviors: [Gerrit.KeyboardShortcutBehavior],
keyBindings: {
k: '_handleKey',
enter: '_handleKey',
},
_handleKey() {},
});
});
setup(() => {
element = fixture('basic');
overlay = fixture('within-overlay');
sandbox = sinon.sandbox.create();
});
teardown(() => {
sandbox.restore();
});
suite('ShortcutManager', () => {
test('bindings management', () => {
const mgr = new kb.ShortcutManager();
const {NEXT_FILE} = kb.Shortcut;
assert.isUndefined(mgr.getBindingsForShortcut(NEXT_FILE));
mgr.bindShortcut(NEXT_FILE, ']', '}', 'right');
assert.deepEqual(
mgr.getBindingsForShortcut(NEXT_FILE),
[']', '}', 'right']);
});
suite('binding descriptions', () => {
function mapToObject(m) {
const o = {};
m.forEach((v, k) => o[k] = v);
return o;
}
test('single combo description', () => {
const mgr = new kb.ShortcutManager();
assert.deepEqual(mgr.describeBinding('a'), ['a']);
assert.deepEqual(mgr.describeBinding('a:keyup'), ['a']);
assert.deepEqual(mgr.describeBinding('ctrl+a'), ['Ctrl', 'a']);
assert.deepEqual(
mgr.describeBinding('ctrl+shift+up:keyup'),
['Ctrl', 'Shift', '↑']);
});
test('combo set description', () => {
const {GO_KEY, DOC_ONLY, ShortcutManager} = kb;
const {GO_TO_OPENED_CHANGES, NEXT_FILE, PREV_FILE} = kb.Shortcut;
const mgr = new ShortcutManager();
assert.isNull(mgr.describeBindings(NEXT_FILE));
mgr.bindShortcut(GO_TO_OPENED_CHANGES, GO_KEY, 'o');
assert.deepEqual(
mgr.describeBindings(GO_TO_OPENED_CHANGES),
[['g', 'o']]);
mgr.bindShortcut(NEXT_FILE, DOC_ONLY, ']', 'ctrl+shift+right:keyup');
assert.deepEqual(
mgr.describeBindings(NEXT_FILE),
[[']'], ['Ctrl', 'Shift', '→']]);
mgr.bindShortcut(PREV_FILE, '[');
assert.deepEqual(mgr.describeBindings(PREV_FILE), [['[']]);
});
test('combo set description width', () => {
const mgr = new kb.ShortcutManager();
assert.strictEqual(mgr.comboSetDisplayWidth([['u']]), 1);
assert.strictEqual(mgr.comboSetDisplayWidth([['g', 'o']]), 2);
assert.strictEqual(mgr.comboSetDisplayWidth([['Shift', 'r']]), 6);
assert.strictEqual(mgr.comboSetDisplayWidth([['x'], ['y']]), 4);
assert.strictEqual(
mgr.comboSetDisplayWidth([['x'], ['y'], ['Shift', 'z']]),
12);
});
test('distribute shortcut help', () => {
const mgr = new kb.ShortcutManager();
assert.deepEqual(mgr.distributeBindingDesc([['o']]), [[['o']]]);
assert.deepEqual(
mgr.distributeBindingDesc([['g', 'o']]),
[[['g', 'o']]]);
assert.deepEqual(
mgr.distributeBindingDesc([['ctrl', 'shift', 'meta', 'enter']]),
[[['ctrl', 'shift', 'meta', 'enter']]]);
assert.deepEqual(
mgr.distributeBindingDesc([
['ctrl', 'shift', 'meta', 'enter'],
['o'],
]),
[
[['ctrl', 'shift', 'meta', 'enter']],
[['o']],
]);
assert.deepEqual(
mgr.distributeBindingDesc([
['ctrl', 'enter'],
['meta', 'enter'],
['ctrl', 's'],
['meta', 's'],
]),
[
[['ctrl', 'enter'], ['meta', 'enter']],
[['ctrl', 's'], ['meta', 's']],
]);
});
test('active shortcuts by section', () => {
const {NEXT_FILE, NEXT_LINE, GO_TO_OPENED_CHANGES, SEARCH} =
kb.Shortcut;
const {DIFFS, EVERYWHERE, NAVIGATION} = kb.ShortcutSection;
const mgr = new kb.ShortcutManager();
mgr.bindShortcut(NEXT_FILE, ']');
mgr.bindShortcut(NEXT_LINE, 'j');
mgr.bindShortcut(GO_TO_OPENED_CHANGES, 'g+o');
mgr.bindShortcut(SEARCH, '/');
assert.deepEqual(
mapToObject(mgr.activeShortcutsBySection()),
{});
mgr.attachHost({
keyboardShortcuts() {
return {
[NEXT_FILE]: null,
};
},
});
assert.deepEqual(
mapToObject(mgr.activeShortcutsBySection()),
{
[NAVIGATION]: [
{shortcut: NEXT_FILE, text: 'Select next file'},
],
});
mgr.attachHost({
keyboardShortcuts() {
return {
[NEXT_LINE]: null,
};
},
});
assert.deepEqual(
mapToObject(mgr.activeShortcutsBySection()),
{
[DIFFS]: [
{shortcut: NEXT_LINE, text: 'Go to next line'},
],
[NAVIGATION]: [
{shortcut: NEXT_FILE, text: 'Select next file'},
],
});
mgr.attachHost({
keyboardShortcuts() {
return {
[SEARCH]: null,
[GO_TO_OPENED_CHANGES]: null,
};
},
});
assert.deepEqual(
mapToObject(mgr.activeShortcutsBySection()),
{
[DIFFS]: [
{shortcut: NEXT_LINE, text: 'Go to next line'},
],
[EVERYWHERE]: [
{shortcut: SEARCH, text: 'Search'},
{
shortcut: GO_TO_OPENED_CHANGES,
text: 'Go to Opened Changes',
},
],
[NAVIGATION]: [
{shortcut: NEXT_FILE, text: 'Select next file'},
],
});
});
test('directory view', () => {
const {
NEXT_FILE, NEXT_LINE, GO_TO_OPENED_CHANGES, SEARCH,
SAVE_COMMENT,
} = kb.Shortcut;
const {DIFFS, EVERYWHERE, NAVIGATION} = kb.ShortcutSection;
const {GO_KEY, ShortcutManager} = kb;
const mgr = new ShortcutManager();
mgr.bindShortcut(NEXT_FILE, ']');
mgr.bindShortcut(NEXT_LINE, 'j');
mgr.bindShortcut(GO_TO_OPENED_CHANGES, GO_KEY, 'o');
mgr.bindShortcut(SEARCH, '/');
mgr.bindShortcut(
SAVE_COMMENT, 'ctrl+enter', 'meta+enter', 'ctrl+s', 'meta+s');
assert.deepEqual(mapToObject(mgr.directoryView()), {});
mgr.attachHost({
keyboardShortcuts() {
return {
[GO_TO_OPENED_CHANGES]: null,
[NEXT_FILE]: null,
[NEXT_LINE]: null,
[SAVE_COMMENT]: null,
[SEARCH]: null,
};
},
});
assert.deepEqual(
mapToObject(mgr.directoryView()),
{
[DIFFS]: [
{binding: [['j']], text: 'Go to next line'},
{
binding: [['Ctrl', 'Enter'], ['Meta', 'Enter']],
text: 'Save comment',
},
{
binding: [['Ctrl', 's'], ['Meta', 's']],
text: 'Save comment',
},
],
[EVERYWHERE]: [
{binding: [['/']], text: 'Search'},
{binding: [['g', 'o']], text: 'Go to Opened Changes'},
],
[NAVIGATION]: [
{binding: [[']']], text: 'Select next file'},
],
});
});
});
});
test('doesn’t block kb shortcuts for non-whitelisted els', done => {
const divEl = document.createElement('div');
element.appendChild(divEl);
element._handleKey = e => {
assert.isFalse(element.shouldSuppressKeyboardShortcut(e));
done();
};
MockInteractions.keyDownOn(divEl, 75, null, 'k');
});
test('blocks kb shortcuts for input els', done => {
const inputEl = document.createElement('input');
element.appendChild(inputEl);
element._handleKey = e => {
assert.isTrue(element.shouldSuppressKeyboardShortcut(e));
done();
};
MockInteractions.keyDownOn(inputEl, 75, null, 'k');
});
test('blocks kb shortcuts for textarea els', done => {
const textareaEl = document.createElement('textarea');
element.appendChild(textareaEl);
element._handleKey = e => {
assert.isTrue(element.shouldSuppressKeyboardShortcut(e));
done();
};
MockInteractions.keyDownOn(textareaEl, 75, null, 'k');
});
test('blocks kb shortcuts for anything in a gr-overlay', done => {
const divEl = document.createElement('div');
const element = overlay.querySelector('test-element');
element.appendChild(divEl);
element._handleKey = e => {
assert.isTrue(element.shouldSuppressKeyboardShortcut(e));
done();
};
MockInteractions.keyDownOn(divEl, 75, null, 'k');
});
test('blocks enter shortcut on an anchor', done => {
const anchorEl = document.createElement('a');
const element = overlay.querySelector('test-element');
element.appendChild(anchorEl);
element._handleKey = e => {
assert.isTrue(element.shouldSuppressKeyboardShortcut(e));
done();
};
MockInteractions.keyDownOn(anchorEl, 13, null, 'enter');
});
test('modifierPressed returns accurate values', () => {
const spy = sandbox.spy(element, 'modifierPressed');
element._handleKey = e => {
element.modifierPressed(e);
};
MockInteractions.keyDownOn(element, 75, 'shift', 'k');
assert.isTrue(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, null, 'k');
assert.isFalse(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, 'ctrl', 'k');
assert.isTrue(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, null, 'k');
assert.isFalse(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, 'meta', 'k');
assert.isTrue(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, null, 'k');
assert.isFalse(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, 'alt', 'k');
assert.isTrue(spy.lastCall.returnValue);
});
test('isModifierPressed returns accurate value', () => {
const spy = sandbox.spy(element, 'isModifierPressed');
element._handleKey = e => {
element.isModifierPressed(e, 'shiftKey');
};
MockInteractions.keyDownOn(element, 75, 'shift', 'k');
assert.isTrue(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, null, 'k');
assert.isFalse(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, 'ctrl', 'k');
assert.isFalse(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, null, 'k');
assert.isFalse(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, 'meta', 'k');
assert.isFalse(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, null, 'k');
assert.isFalse(spy.lastCall.returnValue);
MockInteractions.keyDownOn(element, 75, 'alt', 'k');
assert.isFalse(spy.lastCall.returnValue);
});
suite('GO_KEY timing', () => {
let handlerStub;
setup(() => {
element._shortcut_go_table.set('a', '_handleA');
handlerStub = element._handleA = sinon.stub();
sandbox.stub(Date, 'now').returns(10000);
});
test('success', () => {
const e = {detail: {key: 'a'}, preventDefault: () => {}};
sandbox.stub(element, 'shouldSuppressKeyboardShortcut').returns(false);
element._shortcut_go_key_last_pressed = 9000;
element._handleGoAction(e);
assert.isTrue(handlerStub.calledOnce);
assert.strictEqual(handlerStub.lastCall.args[0], e);
});
test('go key not pressed', () => {
const e = {detail: {key: 'a'}, preventDefault: () => {}};
sandbox.stub(element, 'shouldSuppressKeyboardShortcut').returns(false);
element._shortcut_go_key_last_pressed = null;
element._handleGoAction(e);
assert.isFalse(handlerStub.called);
});
test('go key pressed too long ago', () => {
const e = {detail: {key: 'a'}, preventDefault: () => {}};
sandbox.stub(element, 'shouldSuppressKeyboardShortcut').returns(false);
element._shortcut_go_key_last_pressed = 3000;
element._handleGoAction(e);
assert.isFalse(handlerStub.called);
});
test('should suppress', () => {
const e = {detail: {key: 'a'}, preventDefault: () => {}};
sandbox.stub(element, 'shouldSuppressKeyboardShortcut').returns(true);
element._shortcut_go_key_last_pressed = 9000;
element._handleGoAction(e);
assert.isFalse(handlerStub.called);
});
test('unrecognized key', () => {
const e = {detail: {key: 'f'}, preventDefault: () => {}};
sandbox.stub(element, 'shouldSuppressKeyboardShortcut').returns(false);
element._shortcut_go_key_last_pressed = 9000;
element._handleGoAction(e);
assert.isFalse(handlerStub.called);
});
});
});
</script>
| {'content_hash': 'ba126cf5f6516de686f641b68f35edfe', 'timestamp': '', 'source': 'github', 'line_count': 439, 'max_line_length': 108, 'avg_line_length': 34.97266514806378, 'alnum_prop': 0.5331205627564646, 'repo_name': 'WANdisco/gerrit', 'id': '15d31b76593232e34f8847c7617b46050eacb7b0', 'size': '15359', 'binary': False, 'copies': '1', 'ref': 'refs/heads/2.16.21_WD', 'path': 'polygerrit-ui/app/behaviors/keyboard-shortcut-behavior/keyboard-shortcut-behavior_test.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '47431'}, {'name': 'GAP', 'bytes': '4119'}, {'name': 'Go', 'bytes': '5563'}, {'name': 'HTML', 'bytes': '726266'}, {'name': 'Java', 'bytes': '11491861'}, {'name': 'JavaScript', 'bytes': '404723'}, {'name': 'Makefile', 'bytes': '7107'}, {'name': 'PLpgSQL', 'bytes': '3576'}, {'name': 'Perl', 'bytes': '9943'}, {'name': 'Prolog', 'bytes': '17904'}, {'name': 'Python', 'bytes': '267395'}, {'name': 'Roff', 'bytes': '32749'}, {'name': 'Shell', 'bytes': '133358'}]} |
package com.zyh.factory.service;

import java.math.BigDecimal;

import org.springframework.stereotype.Service;

import com.zyh.factory.entity.OrderEntity;
import com.zyh.factory.entity.SourceDemandEntity;
import com.zyh.factory.entity.SourceEntity;
import com.zyh.factory.entity.WorkEntity;
import com.zyh.factory.transman.TransMessage;

/**
 * Populates a {@link SourceDemandEntity} from an order and a source record,
 * validating the caller-supplied color and quantity first. Validation
 * failures are reported via {@link TransMessage#doSetError} and the partially
 * filled entity is returned as-is.
 */
@Service
public class SourceDemandServiceImpl implements SourceDemandService {

	@Override
	public SourceDemandEntity create(TransMessage transMessage,
			SourceDemandEntity sourceDemand, OrderEntity order, WorkEntity work, SourceEntity source) {
		// Validate required caller-supplied fields; errors go to transMessage.
		if (sourceDemand.getSourceColor() ==null) {
			transMessage.doSetError("color missing");
			return sourceDemand;
		}
		if (sourceDemand.getQuantity() <= 0) {
			transMessage.doSetError("pls input Quantity.");
			return sourceDemand;
		}
		// Link the demand to its order and mark it as newly created.
		sourceDemand.setOrderId(order.getOrderId());
		sourceDemand.setDemandStatus("new");
		sourceDemand.setSourceId(source.getSourceId());
		// sourceColor and quantity are already set by the caller.
		//sourceDemand.setSourceColor();
		//sourceDemand.setQuantity(int quantity)
		sourceDemand.setSourceUnit(source.getSourceUnit());
		// Total price = unit price * requested quantity.
		sourceDemand.setTotalPrice(source.getPrice().multiply(BigDecimal.valueOf(sourceDemand.getQuantity())));
		sourceDemand.setProviderName(source.getProviderName());
		sourceDemand.setSourceName(source.getSourceName());
		// NOTE(review): the 'work' parameter is currently unused — confirm intent.
		return sourceDemand;
	}
}
| {'content_hash': 'c32c6d0e489004d640dd72e4a1ac7f45', 'timestamp': '', 'source': 'github', 'line_count': 40, 'max_line_length': 105, 'avg_line_length': 33.525, 'alnum_prop': 0.7949291573452647, 'repo_name': 'zhouyihang/FactoryManagement', 'id': '693949de14496169e8d238397ed4b34bac75a80e', 'size': '1341', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/com/zyh/factory/service/SourceDemandServiceImpl.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '5006'}, {'name': 'CSS', 'bytes': '57716'}, {'name': 'HTML', 'bytes': '63867'}, {'name': 'Java', 'bytes': '69572'}, {'name': 'JavaScript', 'bytes': '308907'}, {'name': 'Shell', 'bytes': '7058'}]} |
package com.github.ltsopensource.nio.handler;

import com.github.ltsopensource.nio.channel.NioChannel;
import com.github.ltsopensource.nio.idle.IdleState;

/**
 * Callback interface for NIO channel events (errors, inbound messages,
 * connection establishment and idle detection).
 *
 * @author Robert HG ([email protected]) on 1/24/16.
 */
public interface NioHandler {

    /** Invoked when an exception is raised while processing the channel. */
    void exceptionCaught(NioChannel channel, Exception cause);

    /** Invoked for each decoded inbound message on the channel. */
    void messageReceived(NioChannel channel, Object msg) throws Exception;

    /** Invoked once the channel's connection has been established. */
    void channelConnected(NioChannel channel);

    /** Invoked when the channel enters the given idle state. */
    void channelIdle(NioChannel channel, IdleState state);
}
| {'content_hash': 'ceb012699c4243f1b5ee4a61888cb305', 'timestamp': '', 'source': 'github', 'line_count': 18, 'max_line_length': 74, 'avg_line_length': 27.555555555555557, 'alnum_prop': 0.7741935483870968, 'repo_name': 'ltsopensource/light-task-scheduler', 'id': '7a03c425a4069c9fa00ee01dcadc1398acf914f6', 'size': '496', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'lts-core/src/main/java/com/github/ltsopensource/nio/handler/NioHandler.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '2281'}, {'name': 'Java', 'bytes': '2905956'}, {'name': 'Shell', 'bytes': '8561'}]} |
package gameObject;

import static org.lwjgl.opengl.GL11.*;

import util.Sprite;

/**
 * Base class for renderable game entities: a position, a sprite and a type
 * id. Subclasses override {@link #update()} for per-frame behavior.
 */
public class GameObject
{
	/** Type ids used to distinguish object kinds at runtime. */
	public static final int ID_PLAYER = 1;
	public static final int ID_ENEMY = 2;
	public static final int ID_ITEM = 3;

	/** Index into {@link #getFlags()} marking this object for removal. */
	public static final int FLAG_REMOVE = 0;

	private boolean[] flags = new boolean[1];

	protected float x, y;
	protected Sprite sprite;
	protected int type;

	/** Per-frame logic hook; no-op in the base class. */
	public void update()
	{
	}

	/** Renders the sprite translated to this object's position. */
	public void render()
	{
		glPushMatrix();
		{
			glTranslatef(x, y, 0);
			sprite.render();
		}
		glPopMatrix();
	}

	/** Initializes position, sprite (color and size) and the type id. */
	protected void init(int type, float x, float y, float r, float g, float b, float sizeX, float sizeY)
	{
		this.x = x;
		this.y = y;
		this.sprite = new Sprite(r, g, b, sizeX, sizeY);
		this.type = type;
	}

	/** Marks this object for removal by the owning manager. */
	protected void remove()
	{
		flags[FLAG_REMOVE] = true;
	}

	public boolean[] getFlags()
	{
		return flags;
	}

	public float getX()
	{
		return x;
	}

	public float getY()
	{
		return y;
	}

	public float getSizeX()
	{
		return sprite.getSizeX();
	}

	public float getSizeY()
	{
		// Bug fix: previously returned the sprite's X size (copy-paste error).
		// Assumes Sprite exposes getSizeY() symmetric to getSizeX() — confirm.
		return sprite.getSizeY();
	}

	public int getType()
	{
		return type;
	}
}
| {'content_hash': '6e57eaaa625c17dbbd713736b61d4fcb', 'timestamp': '', 'source': 'github', 'line_count': 78, 'max_line_length': 101, 'avg_line_length': 15.166666666666666, 'alnum_prop': 0.6018596787827557, 'repo_name': 'johnnicholson0/TimeRPG', 'id': '5e61ee8fc148371175591bb608f7436d89421010', 'size': '1183', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'TimeRPG/src/gameObject/GameObject.java', 'mode': '33188', 'license': 'mit', 'language': []} |
package org.apache.lucene.index;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
public class TestIndexWriterDelete extends LuceneTestCase {
// test the simple case
  public void testSimpleCase() throws IOException {
    String[] keywords = { "1", "2" };
    String[] unindexed = { "Netherlands", "Italy" };
    String[] unstored = { "Amsterdam has lots of bridges",
        "Venice has lots of canals" };
    String[] text = { "Amsterdam", "Venice" };

    Directory dir = newDirectory();
    // Flush buffered deletes after every delete term.
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));

    FieldType custom1 = new FieldType();
    custom1.setStored(true);
    // Index one document per city.
    for (int i = 0; i < keywords.length; i++) {
      Document doc = new Document();
      doc.add(newStringField("id", keywords[i], Field.Store.YES));
      doc.add(newField("country", unindexed[i], custom1));
      doc.add(newTextField("contents", unstored[i], Field.Store.NO));
      doc.add(newTextField("city", text[i], Field.Store.YES));
      modifier.addDocument(doc);
    }
    modifier.forceMerge(1);
    modifier.commit();

    // One hit before the delete...
    Term term = new Term("city", "Amsterdam");
    int hitCount = getHitCount(dir, term);
    assertEquals(1, hitCount);
    if (VERBOSE) {
      System.out.println("\nTEST: now delete by term=" + term);
    }
    modifier.deleteDocuments(term);
    modifier.commit();

    if (VERBOSE) {
      System.out.println("\nTEST: now getHitCount");
    }
    // ...and none after it is committed.
    hitCount = getHitCount(dir, term);
    assertEquals(0, hitCount);

    modifier.close();
    dir.close();
  }
// test when delete terms only apply to disk segments
  public void testNonRAMDelete() throws IOException {

    Directory dir = newDirectory();
    // Tiny buffers force segments onto disk quickly.
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    int id = 0;
    int value = 100;

    for (int i = 0; i < 7; i++) {
      addDoc(modifier, ++id, value);
    }
    modifier.commit();

    // After commit, nothing remains buffered in RAM; all docs live in
    // on-disk segments.
    assertEquals(0, modifier.getNumBufferedDocuments());
    assertTrue(0 < modifier.getSegmentCount());

    modifier.commit();

    IndexReader reader = DirectoryReader.open(dir);
    assertEquals(7, reader.numDocs());
    reader.close();

    // Delete all seven docs by their shared "value" term.
    modifier.deleteDocuments(new Term("value", String.valueOf(value)));

    modifier.commit();

    reader = DirectoryReader.open(dir);
    assertEquals(0, reader.numDocs());
    reader.close();
    modifier.close();
    dir.close();
  }
  public void testMaxBufferedDeletes() throws IOException {
    Directory dir = newDirectory();
    // With maxBufferedDeleteTerms=1, every delete triggers a deletes flush.
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));

    writer.addDocument(new Document());
    writer.deleteDocuments(new Term("foobar", "1"));
    writer.deleteDocuments(new Term("foobar", "1"));
    writer.deleteDocuments(new Term("foobar", "1"));
    // Three deletes => three flushes of buffered deletes.
    assertEquals(3, writer.getFlushDeletesCount());
    writer.close();
    dir.close();
  }
  // Verifies deletes that apply only to still-buffered (RAM) documents.
  // Runs twice: t==0 deletes by Term, t==1 deletes by TermQuery.
  public void testRAMDeletes() throws IOException {
    for(int t=0;t<2;t++) {
      if (VERBOSE) {
        System.out.println("TEST: t=" + t);
      }
      Directory dir = newDirectory();
      // Buffers sized so nothing is flushed before the explicit commit below.
      IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
          TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
          .setMaxBufferedDeleteTerms(4));
      int id = 0;
      int value = 100;
      addDoc(modifier, ++id, value);
      if (0 == t)
        modifier.deleteDocuments(new Term("value", String.valueOf(value)));
      else
        modifier.deleteDocuments(new TermQuery(new Term("value", String.valueOf(value))));
      addDoc(modifier, ++id, value);
      if (0 == t) {
        modifier.deleteDocuments(new Term("value", String.valueOf(value)));
        // Two buffered delete-term operations, but only one distinct term.
        assertEquals(2, modifier.getNumBufferedDeleteTerms());
        assertEquals(1, modifier.getBufferedDeleteTermsSize());
      }
      else
        modifier.deleteDocuments(new TermQuery(new Term("value", String.valueOf(value))));
      addDoc(modifier, ++id, value);
      // Nothing has been flushed yet; all segments are still in RAM.
      assertEquals(0, modifier.getSegmentCount());
      modifier.commit();
      // Only the last-added doc survives: each delete removed all prior docs.
      IndexReader reader = DirectoryReader.open(dir);
      assertEquals(1, reader.numDocs());
      int hitCount = getHitCount(dir, new Term("id", String.valueOf(id)));
      assertEquals(1, hitCount);
      reader.close();
      modifier.close();
      dir.close();
    }
  }
  // Verifies a delete term that matches documents in both committed (disk)
  // segments and still-buffered (RAM) documents.
  public void testBothDeletes() throws IOException {
    Directory dir = newDirectory();
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
        .setMaxBufferedDeleteTerms(100));
    int id = 0;
    int value = 100;
    // 5 docs with value=100 (these must survive the delete below).
    for (int i = 0; i < 5; i++) {
      addDoc(modifier, ++id, value);
    }
    // 5 docs with value=200, committed to disk...
    value = 200;
    for (int i = 0; i < 5; i++) {
      addDoc(modifier, ++id, value);
    }
    modifier.commit();
    // ...plus 5 more value=200 docs left buffered in RAM.
    for (int i = 0; i < 5; i++) {
      addDoc(modifier, ++id, value);
    }
    // Deletes all 10 value=200 docs, on disk and in RAM alike.
    modifier.deleteDocuments(new Term("value", String.valueOf(value)));
    modifier.commit();
    IndexReader reader = DirectoryReader.open(dir);
    assertEquals(5, reader.numDocs());
    modifier.close();
    reader.close();
    dir.close();
  }
  // Verifies that delete terms issued in a batch (Term[] overload) are
  // flushed together and remove exactly the matching docs.
  public void testBatchDeletes() throws IOException {
    Directory dir = newDirectory();
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    int id = 0;
    int value = 100;
    for (int i = 0; i < 7; i++) {
      addDoc(modifier, ++id, value);
    }
    modifier.commit();
    IndexReader reader = DirectoryReader.open(dir);
    assertEquals(7, reader.numDocs());
    reader.close();
    // Delete docs id=1 and id=2 with two single-term calls: 7 -> 5 docs.
    id = 0;
    modifier.deleteDocuments(new Term("id", String.valueOf(++id)));
    modifier.deleteDocuments(new Term("id", String.valueOf(++id)));
    modifier.commit();
    reader = DirectoryReader.open(dir);
    assertEquals(5, reader.numDocs());
    reader.close();
    // Delete docs id=3..5 in one batched call: 5 -> 2 docs.
    Term[] terms = new Term[3];
    for (int i = 0; i < terms.length; i++) {
      terms[i] = new Term("id", String.valueOf(++id));
    }
    modifier.deleteDocuments(terms);
    modifier.commit();
    reader = DirectoryReader.open(dir);
    assertEquals(2, reader.numDocs());
    reader.close();
    modifier.close();
    dir.close();
  }
  // Verifies deleteAll(): it is buffered until commit, and docs added after
  // the deleteAll() (but before the commit) survive.
  public void testDeleteAll() throws IOException {
    Directory dir = newDirectory();
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    int id = 0;
    int value = 100;
    for (int i = 0; i < 7; i++) {
      addDoc(modifier, ++id, value);
    }
    modifier.commit();
    IndexReader reader = DirectoryReader.open(dir);
    assertEquals(7, reader.numDocs());
    reader.close();
    // Add 1 doc (so we will have something buffered)
    addDoc(modifier, 99, value);
    // Delete all
    modifier.deleteAll();
    // The deleteAll is not committed yet, so a fresh reader still sees 7 docs.
    reader = DirectoryReader.open(dir);
    assertEquals(7, reader.numDocs());
    reader.close();
    // Add a doc and update a doc (after the deleteAll, before the commit)
    addDoc(modifier, 101, value);
    updateDoc(modifier, 102, value);
    // commit the delete all
    modifier.commit();
    // Only the two docs added after deleteAll() (101 and 102) survive.
    reader = DirectoryReader.open(dir);
    assertEquals(2, reader.numDocs());
    reader.close();
    modifier.close();
    dir.close();
  }
public void testDeleteAllNoDeadLock() throws IOException, InterruptedException {
Directory dir = newDirectory();
final RandomIndexWriter modifier = new RandomIndexWriter(random(), dir);
int numThreads = atLeast(2);
Thread[] threads = new Thread[numThreads];
final CountDownLatch latch = new CountDownLatch(1);
final CountDownLatch doneLatch = new CountDownLatch(numThreads);
for (int i = 0; i < numThreads; i++) {
final int offset = i;
threads[i] = new Thread() {
@Override
public void run() {
int id = offset * 1000;
int value = 100;
try {
latch.await();
for (int j = 0; j < 1000; j++) {
Document doc = new Document();
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(newStringField("id", String.valueOf(id++), Field.Store.YES));
doc.add(newStringField("value", String.valueOf(value), Field.Store.NO));
if (defaultCodecSupportsDocValues()) {
doc.add(new NumericDocValuesField("dv", value));
}
modifier.addDocument(doc);
if (VERBOSE) {
System.out.println("\tThread["+offset+"]: add doc: " + id);
}
}
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
doneLatch.countDown();
if (VERBOSE) {
System.out.println("\tThread["+offset+"]: done indexing" );
}
}
}
};
threads[i].start();
}
latch.countDown();
while(!doneLatch.await(1, TimeUnit.MILLISECONDS)) {
modifier.deleteAll();
if (VERBOSE) {
System.out.println("del all");
}
}
modifier.deleteAll();
for (Thread thread : threads) {
thread.join();
}
modifier.close();
DirectoryReader reader = DirectoryReader.open(dir);
assertEquals(reader.maxDoc(), 0);
assertEquals(reader.numDocs(), 0);
assertEquals(reader.numDeletedDocs(), 0);
reader.close();
dir.close();
}
  // Verifies that rollback() discards an uncommitted deleteAll(): the last
  // committed state (7 docs) must remain intact.
  public void testDeleteAllRollback() throws IOException {
    Directory dir = newDirectory();
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    int id = 0;
    int value = 100;
    for (int i = 0; i < 7; i++) {
      addDoc(modifier, ++id, value);
    }
    modifier.commit();
    // One extra uncommitted doc; it is discarded by the rollback below too.
    addDoc(modifier, ++id, value);
    IndexReader reader = DirectoryReader.open(dir);
    assertEquals(7, reader.numDocs());
    reader.close();
    // Delete all
    modifier.deleteAll();
    // Roll it back
    modifier.rollback();
    modifier.close();
    // Validate that the docs are still there
    reader = DirectoryReader.open(dir);
    assertEquals(7, reader.numDocs());
    reader.close();
    dir.close();
  }
  // Verifies deleteAll() through a near-real-time reader: the NRT reader sees
  // the (uncommitted) deleteAll immediately, but rollback() still restores
  // the last committed state.
  public void testDeleteAllNRT() throws IOException {
    Directory dir = newDirectory();
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    int id = 0;
    int value = 100;
    for (int i = 0; i < 7; i++) {
      addDoc(modifier, ++id, value);
    }
    modifier.commit();
    IndexReader reader = modifier.getReader();
    assertEquals(7, reader.numDocs());
    reader.close();
    // Two more docs, left uncommitted.
    addDoc(modifier, ++id, value);
    addDoc(modifier, ++id, value);
    // Delete all
    modifier.deleteAll();
    // The NRT reader reflects the deleteAll even though it isn't committed.
    reader = modifier.getReader();
    assertEquals(0, reader.numDocs());
    reader.close();
    // Roll it back
    modifier.rollback();
    modifier.close();
    // Validate that the docs are still there
    reader = DirectoryReader.open(dir);
    assertEquals(7, reader.numDocs());
    reader.close();
    dir.close();
  }
private void updateDoc(IndexWriter modifier, int id, int value)
throws IOException {
Document doc = new Document();
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(newStringField("id", String.valueOf(id), Field.Store.YES));
doc.add(newStringField("value", String.valueOf(value), Field.Store.NO));
if (defaultCodecSupportsDocValues()) {
doc.add(new NumericDocValuesField("dv", value));
}
modifier.updateDocument(new Term("id", String.valueOf(id)), doc);
}
private void addDoc(IndexWriter modifier, int id, int value)
throws IOException {
Document doc = new Document();
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(newStringField("id", String.valueOf(id), Field.Store.YES));
doc.add(newStringField("value", String.valueOf(value), Field.Store.NO));
if (defaultCodecSupportsDocValues()) {
doc.add(new NumericDocValuesField("dv", value));
}
modifier.addDocument(doc);
}
private int getHitCount(Directory dir, Term term) throws IOException {
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
int hitCount = searcher.search(new TermQuery(term), null, 1000).totalHits;
reader.close();
return hitCount;
}
  // Runs the disk-full consistency test using deleteDocuments operations.
  public void testDeletesOnDiskFull() throws IOException {
    doTestOperationsOnDiskFull(false);
  }
  // Runs the disk-full consistency test using updateDocument operations.
  public void testUpdatesOnDiskFull() throws IOException {
    doTestOperationsOnDiskFull(true);
  }
  /**
   * Make sure if modifier tries to commit but hits disk full that modifier
   * remains consistent and usable. Similar to TestIndexReader.testDiskFull().
   *
   * Builds a 157-doc starting index, then repeatedly copies it into a
   * MockDirectoryWrapper with an ever-increasing disk-space limit, performs
   * 13 deletes (or updates, per the flag), and verifies transactional
   * semantics: after a disk-full failure the index must show either all 157
   * original docs or the fully-modified 144-doc state, never anything else.
   *
   * @param updates true to exercise updateDocument, false to exercise
   *                deleteDocuments
   */
  private void doTestOperationsOnDiskFull(boolean updates) throws IOException {
    Term searchTerm = new Term("content", "aaa");
    int START_COUNT = 157;
    // 157 docs minus the 13 deleted/updated ids (12, 24, ..., 156) = 144
    // docs still matching "content:aaa" (updates rewrite content to "bbb").
    int END_COUNT = 144;
    // First build up a starting index:
    MockDirectoryWrapper startDir = newMockDirectory();
    // TODO: find the resource leak that only occurs sometimes here.
    startDir.setNoDeleteOpenFile(false);
    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
    for (int i = 0; i < 157; i++) {
      Document d = new Document();
      d.add(newStringField("id", Integer.toString(i), Field.Store.YES));
      d.add(newTextField("content", "aaa " + i, Field.Store.NO));
      if (defaultCodecSupportsDocValues()) {
        d.add(new NumericDocValuesField("dv", i));
      }
      writer.addDocument(d);
    }
    writer.close();
    long diskUsage = startDir.sizeInBytes();
    long diskFree = diskUsage + 10;
    IOException err = null;
    boolean done = false;
    // Iterate w/ ever increasing free disk space:
    while (!done) {
      if (VERBOSE) {
        System.out.println("TEST: cycle");
      }
      // Fresh copy of the starting index for this cycle.
      MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(startDir, newIOContext(random())));
      dir.setPreventDoubleWrite(false);
      IndexWriter modifier = new IndexWriter(dir,
                                             newIndexWriterConfig(
                                                                  TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
                                               .setMaxBufferedDocs(1000)
                                               .setMaxBufferedDeleteTerms(1000)
                                               .setMergeScheduler(new ConcurrentMergeScheduler()));
      ((ConcurrentMergeScheduler) modifier.getConfig().getMergeScheduler()).setSuppressExceptions();
      // For each disk size, first try to commit against
      // dir that will hit random IOExceptions & disk
      // full; after, give it infinite disk space & turn
      // off random IOExceptions & retry w/ same reader:
      boolean success = false;
      for (int x = 0; x < 2; x++) {
        if (VERBOSE) {
          System.out.println("TEST: x=" + x);
        }
        double rate = 0.1;
        double diskRatio = ((double)diskFree) / diskUsage;
        long thisDiskFree;
        String testName;
        if (0 == x) {
          // First pass: constrained disk plus random IOExceptions, with the
          // exception rate backed off as free space grows across cycles.
          thisDiskFree = diskFree;
          if (diskRatio >= 2.0) {
            rate /= 2;
          }
          if (diskRatio >= 4.0) {
            rate /= 2;
          }
          if (diskRatio >= 6.0) {
            rate = 0.0;
          }
          if (VERBOSE) {
            System.out.println("\ncycle: " + diskFree + " bytes");
          }
          testName = "disk full during reader.close() @ " + thisDiskFree
            + " bytes";
          dir.setRandomIOExceptionRateOnOpen(random().nextDouble()*0.01);
        } else {
          // Second pass: unlimited disk, no injected exceptions; the same
          // writer must recover and close cleanly.
          thisDiskFree = 0;
          rate = 0.0;
          if (VERBOSE) {
            System.out.println("\ncycle: same writer: unlimited disk space");
          }
          testName = "reader re-use after disk full";
          dir.setRandomIOExceptionRateOnOpen(0.0);
        }
        dir.setMaxSizeInBytes(thisDiskFree);
        dir.setRandomIOExceptionRate(rate);
        try {
          if (0 == x) {
            // Modify every 12th doc: ids 12, 24, ..., 156 (13 docs).
            int docId = 12;
            for (int i = 0; i < 13; i++) {
              if (updates) {
                Document d = new Document();
                d.add(newStringField("id", Integer.toString(i), Field.Store.YES));
                d.add(newTextField("content", "bbb " + i, Field.Store.NO));
                if (defaultCodecSupportsDocValues()) {
                  d.add(new NumericDocValuesField("dv", i));
                }
                modifier.updateDocument(new Term("id", Integer.toString(docId)), d);
              } else { // deletes
                modifier.deleteDocuments(new Term("id", Integer.toString(docId)));
                // modifier.setNorm(docId, "contents", (float)2.0);
              }
              docId += 12;
            }
          }
          modifier.close();
          success = true;
          if (0 == x) {
            done = true;
          }
        }
        catch (IOException e) {
          if (VERBOSE) {
            System.out.println("  hit IOException: " + e);
            e.printStackTrace(System.out);
          }
          err = e;
          if (1 == x) {
            // With unlimited disk and no injected failures, any IOException
            // is a real bug.
            e.printStackTrace();
            fail(testName + " hit IOException after disk space was freed up");
          }
        }
        // prevent throwing a random exception here!!
        final double randomIOExceptionRate = dir.getRandomIOExceptionRate();
        final long maxSizeInBytes = dir.getMaxSizeInBytes();
        dir.setRandomIOExceptionRate(0.0);
        dir.setRandomIOExceptionRateOnOpen(0.0);
        dir.setMaxSizeInBytes(0);
        if (!success) {
          // Must force the close else the writer can have
          // open files which cause exc in MockRAMDir.close
          if (VERBOSE) {
            System.out.println("TEST: now rollback");
          }
          modifier.rollback();
        }
        // If the close() succeeded, make sure there are
        // no unreferenced files.
        if (success) {
          _TestUtil.checkIndex(dir);
          TestIndexWriter.assertNoUnreferencedFiles(dir, "after writer.close");
        }
        dir.setRandomIOExceptionRate(randomIOExceptionRate);
        dir.setMaxSizeInBytes(maxSizeInBytes);
        // Finally, verify index is not corrupt, and, if
        // we succeeded, we see all docs changed, and if
        // we failed, we see either all docs or no docs
        // changed (transactional semantics):
        IndexReader newReader = null;
        try {
          newReader = DirectoryReader.open(dir);
        }
        catch (IOException e) {
          e.printStackTrace();
          fail(testName
               + ":exception when creating IndexReader after disk full during close: "
               + e);
        }
        IndexSearcher searcher = newSearcher(newReader);
        ScoreDoc[] hits = null;
        try {
          hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
        }
        catch (IOException e) {
          e.printStackTrace();
          fail(testName + ": exception when searching: " + e);
        }
        int result2 = hits.length;
        if (success) {
          if (x == 0 && result2 != END_COUNT) {
            fail(testName
                 + ": method did not throw exception but hits.length for search on term 'aaa' is "
                 + result2 + " instead of expected " + END_COUNT);
          } else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT) {
            // It's possible that the first exception was
            // "recoverable" wrt pending deletes, in which
            // case the pending deletes are retained and
            // then re-flushing (with plenty of disk
            // space) will succeed in flushing the
            // deletes:
            fail(testName
                 + ": method did not throw exception but hits.length for search on term 'aaa' is "
                 + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
          }
        } else {
          // On hitting exception we still may have added
          // all docs:
          if (result2 != START_COUNT && result2 != END_COUNT) {
            err.printStackTrace();
            fail(testName
                 + ": method did throw exception but hits.length for search on term 'aaa' is "
                 + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
          }
        }
        newReader.close();
        if (result2 == END_COUNT) {
          break;
        }
      }
      dir.close();
      modifier.close();
      // Try again with 10 more bytes of free space:
      diskFree += 10;
    }
    startDir.close();
  }
  // This test tests that buffered deletes are cleared when
  // an Exception is hit during flush.
  public void testErrorAfterApplyDeletes() throws IOException {
    // One-shot failure: first watches the stack for applyDeletesAndUpdates,
    // then throws a single IOException on the first write that happens after
    // the deletes have been applied (and only on the original thread, so
    // concurrent merges are never failed).
    MockDirectoryWrapper.Failure failure = new MockDirectoryWrapper.Failure() {
        boolean sawMaybe = false;
        boolean failed = false;
        Thread thread;
        @Override
        public MockDirectoryWrapper.Failure reset() {
          thread = Thread.currentThread();
          sawMaybe = false;
          failed = false;
          return this;
        }
        @Override
        public void eval(MockDirectoryWrapper dir)  throws IOException {
          if (Thread.currentThread() != thread) {
            // don't fail during merging
            return;
          }
          if (sawMaybe && !failed) {
            boolean seen = false;
            StackTraceElement[] trace = new Exception().getStackTrace();
            for (int i = 0; i < trace.length; i++) {
              if ("applyDeletesAndUpdates".equals(trace[i].getMethodName())) {
                seen = true;
                break;
              }
            }
            if (!seen) {
              // Only fail once we are no longer in applyDeletes
              failed = true;
              if (VERBOSE) {
                System.out.println("TEST: mock failure: now fail");
                new Throwable().printStackTrace(System.out);
              }
              throw new IOException("fail after applyDeletes");
            }
          }
          if (!failed) {
            StackTraceElement[] trace = new Exception().getStackTrace();
            for (int i = 0; i < trace.length; i++) {
              if ("applyDeletesAndUpdates".equals(trace[i].getMethodName())) {
                if (VERBOSE) {
                  System.out.println("TEST: mock failure: saw applyDeletes");
                  new Throwable().printStackTrace(System.out);
                }
                sawMaybe = true;
                break;
              }
            }
          }
        }
      };
    // create a couple of files
    String[] keywords = { "1", "2" };
    String[] unindexed = { "Netherlands", "Italy" };
    String[] unstored = { "Amsterdam has lots of bridges",
                          "Venice has lots of canals" };
    String[] text = { "Amsterdam", "Venice" };
    MockDirectoryWrapper dir = newMockDirectory();
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
    MergePolicy lmp = modifier.getConfig().getMergePolicy();
    lmp.setNoCFSRatio(1.0);
    dir.failOn(failure.reset());
    FieldType custom1 = new FieldType();
    custom1.setStored(true);
    for (int i = 0; i < keywords.length; i++) {
      Document doc = new Document();
      doc.add(newStringField("id", keywords[i], Field.Store.YES));
      doc.add(newField("country", unindexed[i], custom1));
      doc.add(newTextField("contents", unstored[i], Field.Store.NO));
      doc.add(newTextField("city", text[i], Field.Store.YES));
      modifier.addDocument(doc);
    }
    // flush (and commit if ac)
    if (VERBOSE) {
      System.out.println("TEST: now full merge");
    }
    modifier.forceMerge(1);
    if (VERBOSE) {
      System.out.println("TEST: now commit");
    }
    modifier.commit();
    // one of the two files hits
    Term term = new Term("city", "Amsterdam");
    int hitCount = getHitCount(dir, term);
    assertEquals(1, hitCount);
    // Delete the doc; max buf del terms is two, so this delete is buffered.
    // (The writer is still open here — the original comment claiming it was
    // closed above was stale.)
    if (VERBOSE) {
      System.out.println("TEST: delete term=" + term);
    }
    modifier.deleteDocuments(term);
    // add a doc (needed for the !ac case; see below)
    // doc remains buffered
    if (VERBOSE) {
      System.out.println("TEST: add empty doc");
    }
    Document doc = new Document();
    modifier.addDocument(doc);
    // commit the changes, the buffered deletes, and the new doc
    // The failure object will fail on the first write after the del
    // file gets created when processing the buffered delete
    // in the ac case, this will be when writing the new segments
    // files so we really don't need the new doc, but it's harmless
    // a new segments file won't be created but in this
    // case, creation of the cfs file happens next so we
    // need the doc (to test that it's okay that we don't
    // lose deletes if failing while creating the cfs file)
    boolean failed = false;
    try {
      if (VERBOSE) {
        System.out.println("TEST: now commit for failure");
      }
      modifier.commit();
    } catch (IOException ioe) {
      // expected
      failed = true;
    }
    assertTrue(failed);
    // The commit above failed, so we need to retry it (which will
    // succeed, because the failure is a one-shot)
    modifier.commit();
    hitCount = getHitCount(dir, term);
    // Make sure the delete was successfully flushed:
    assertEquals(0, hitCount);
    modifier.close();
    dir.close();
  }
  // This test tests that the files created by the docs writer before
  // a segment is written are cleaned up if there's an i/o error
  public void testErrorInDocsWriterAdd() throws IOException {
    // Fails exactly one directory operation (the first eval after reset),
    // simulating an I/O error in the middle of adding a document.
    MockDirectoryWrapper.Failure failure = new MockDirectoryWrapper.Failure() {
        boolean failed = false;
        @Override
        public MockDirectoryWrapper.Failure reset() {
          failed = false;
          return this;
        }
        @Override
        public void eval(MockDirectoryWrapper dir)  throws IOException {
          if (!failed) {
            failed = true;
            throw new IOException("fail in add doc");
          }
        }
      };
    // create a couple of files
    String[] keywords = { "1", "2" };
    String[] unindexed = { "Netherlands", "Italy" };
    String[] unstored = { "Amsterdam has lots of bridges",
                          "Venice has lots of canals" };
    String[] text = { "Amsterdam", "Venice" };
    MockDirectoryWrapper dir = newMockDirectory();
    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
    modifier.commit();
    dir.failOn(failure.reset());
    FieldType custom1 = new FieldType();
    custom1.setStored(true);
    for (int i = 0; i < keywords.length; i++) {
      Document doc = new Document();
      doc.add(newStringField("id", keywords[i], Field.Store.YES));
      doc.add(newField("country", unindexed[i], custom1));
      doc.add(newTextField("contents", unstored[i], Field.Store.NO));
      doc.add(newTextField("city", text[i], Field.Store.YES));
      try {
        modifier.addDocument(doc);
      } catch (IOException io) {
        // The injected one-shot failure; stop adding once it fires.
        if (VERBOSE) {
          System.out.println("TEST: got expected exc:");
          io.printStackTrace(System.out);
        }
        break;
      }
    }
    modifier.close();
    // The aborted add must not leave partial files behind.
    TestIndexWriter.assertNoUnreferencedFiles(dir, "docsWriter.abort() failed to delete unreferenced files");
    dir.close();
  }
public void testDeleteNullQuery() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
for (int i = 0; i < 5; i++) {
addDoc(modifier, i, 2*i);
}
modifier.deleteDocuments(new TermQuery(new Term("nada", "nada")));
modifier.commit();
assertEquals(5, modifier.numDocs());
modifier.close();
dir.close();
}
public void testDeleteAllSlowly() throws Exception {
final Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final int NUM_DOCS = atLeast(1000);
final List<Integer> ids = new ArrayList<Integer>(NUM_DOCS);
for(int id=0;id<NUM_DOCS;id++) {
ids.add(id);
}
Collections.shuffle(ids, random());
for(int id : ids) {
Document doc = new Document();
doc.add(newStringField("id", ""+id, Field.Store.NO));
w.addDocument(doc);
}
Collections.shuffle(ids, random());
int upto = 0;
while(upto < ids.size()) {
final int left = ids.size() - upto;
final int inc = Math.min(left, _TestUtil.nextInt(random(), 1, 20));
final int limit = upto + inc;
while(upto < limit) {
w.deleteDocuments(new Term("id", ""+ids.get(upto++)));
}
final IndexReader r = w.getReader();
assertEquals(NUM_DOCS - upto, r.numDocs());
r.close();
}
w.close();
dir.close();
}
  // Verifies that both pure indexing and pure deleting eventually trigger a
  // flush via RAM accounting, and that neither flushes too eagerly (>2500
  // ops must fit in the 1MB buffer).
  public void testIndexingThenDeleting() throws Exception {
    // TODO: move this test to its own class and just @SuppressCodecs?
    // TODO: is it enough to just use newFSDirectory?
    final String fieldFormat = _TestUtil.getPostingsFormat("field");
    assumeFalse("This test cannot run with Memory codec", fieldFormat.equals("Memory"));
    assumeFalse("This test cannot run with SimpleText codec", fieldFormat.equals("SimpleText"));
    assumeFalse("This test cannot run with Direct codec", fieldFormat.equals("Direct"));
    final Random r = random();
    Directory dir = newDirectory();
    // note this test explicitly disables payloads
    final Analyzer analyzer = new Analyzer() {
      @Override
      public TokenStreamComponents createComponents(String fieldName, Reader reader) {
        return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true));
      }
    };
    // Doc-count / delete-term-count auto-flush disabled: only RAM usage
    // (1 MB) may trigger flushes.
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH));
    Document doc = new Document();
    doc.add(newTextField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO));
    int num = atLeast(3);
    for (int iter = 0; iter < num; iter++) {
      int count = 0;
      final boolean doIndexing = r.nextBoolean();
      if (VERBOSE) {
        System.out.println("TEST: iter doIndexing=" + doIndexing);
      }
      if (doIndexing) {
        // Add docs until a flush is triggered
        final int startFlushCount = w.getFlushCount();
        while(w.getFlushCount() == startFlushCount) {
          w.addDocument(doc);
          count++;
        }
      } else {
        // Delete docs until a flush is triggered. The terms target field
        // "foo", which no doc contains, so only buffered-delete RAM usage
        // (not actual deletions) drives the flush.
        final int startFlushCount = w.getFlushCount();
        while(w.getFlushCount() == startFlushCount) {
          w.deleteDocuments(new Term("foo", ""+count));
          count++;
        }
      }
      assertTrue("flush happened too quickly during " + (doIndexing ? "indexing" : "deleting") + " count=" + count, count > 2500);
    }
    w.close();
    dir.close();
  }
  // LUCENE-3340: make sure deletes that we don't apply
  // during flush (ie are just pushed into the stream) are
  // in fact later flushed due to their RAM usage:
  public void testFlushPushedDeletesByRAM() throws Exception {
    Directory dir = newDirectory();
    // Cannot use RandomIndexWriter because we don't want to
    // ever call commit() for this test:
    // note: tiny rambuffer used, as with a 1MB buffer the test is too slow (flush @ 128,999)
    IndexWriter w = new IndexWriter(dir,
                                    newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
                                    .setRAMBufferSizeMB(0.1f).setMaxBufferedDocs(1000).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false));
    int count = 0;
    while(true) {
      Document doc = new Document();
      doc.add(new StringField("id", count+"", Field.Store.NO));
      final Term delTerm;
      if (count == 1010) {
        // This is the only delete that applies
        delTerm = new Term("id", ""+0);
      } else {
        // These get buffered, taking up RAM, but delete
        // nothing when applied:
        delTerm = new Term("id", "x" + count);
      }
      w.updateDocument(delTerm, doc);
      // Eventually segment 0 should get a del docs:
      // TODO: fix this test
      if (dir.fileExists("_0_1.del") || dir.fileExists("_0_1.liv") ) {
        if (VERBOSE) {
          System.out.println("TEST: deletes created @ count=" + count);
        }
        break;
      }
      count++;
      // Today we applyDeletes @ count=21553; even if we make
      // sizable improvements to RAM efficiency of buffered
      // del term we're unlikely to go over 100K:
      if (count > 100000) {
        fail("delete's were not applied");
      }
    }
    w.close();
    dir.close();
  }
  // LUCENE-3340: same as testFlushPushedDeletesByRAM, but here the pushed
  // deletes must be flushed once their COUNT reaches maxBufferedDeleteTerms
  // (RAM-based flushing is disabled):
  public void testFlushPushedDeletesByCount() throws Exception {
    Directory dir = newDirectory();
    // Cannot use RandomIndexWriter because we don't want to
    // ever call commit() for this test:
    final int flushAtDelCount = atLeast(1020);
    IndexWriter w = new IndexWriter(dir,
                                    newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
                                    setMaxBufferedDeleteTerms(flushAtDelCount).setMaxBufferedDocs(1000).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false));
    int count = 0;
    while(true) {
      Document doc = new Document();
      doc.add(new StringField("id", count+"", Field.Store.NO));
      final Term delTerm;
      if (count == 1010) {
        // This is the only delete that applies
        delTerm = new Term("id", ""+0);
      } else {
        // These get buffered, taking up RAM, but delete
        // nothing when applied:
        delTerm = new Term("id", "x" + count);
      }
      w.updateDocument(delTerm, doc);
      // Eventually segment 0 should get a del docs:
      // TODO: fix this test
      if (dir.fileExists("_0_1.del") || dir.fileExists("_0_1.liv")) {
        break;
      }
      count++;
      if (count > flushAtDelCount) {
        fail("delete's were not applied at count=" + flushAtDelCount);
      }
    }
    w.close();
    dir.close();
  }
  // Make sure buffered (pushed) deletes don't use up so
  // much RAM that it forces long tail of tiny segments:
  @Nightly
  public void testApplyDeletesOnFlush() throws Exception {
    Directory dir = newDirectory();
    // Cannot use RandomIndexWriter because we don't want to
    // ever call commit() for this test:
    final AtomicInteger docsInSegment = new AtomicInteger();
    final AtomicBoolean closing = new AtomicBoolean();
    final AtomicBoolean sawAfterFlush = new AtomicBoolean();
    // Anonymous subclass: doAfterFlush asserts each flushed segment holds at
    // least 7 docs (i.e. buffered-delete RAM did not starve the doc buffer),
    // except during close when a small final segment is expected.
    IndexWriter w = new IndexWriter(dir,
                                    newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
                                    setRAMBufferSizeMB(0.5).setMaxBufferedDocs(-1).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false)) {
        @Override
        public void doAfterFlush() {
          assertTrue("only " + docsInSegment.get() + " in segment", closing.get() || docsInSegment.get() >= 7);
          docsInSegment.set(0);
          sawAfterFlush.set(true);
        }
      };
    int id = 0;
    while(true) {
      // Each doc gets ~100 random terms so segments fill up quickly.
      StringBuilder sb = new StringBuilder();
      for(int termIDX=0;termIDX<100;termIDX++) {
        sb.append(' ').append(_TestUtil.randomRealisticUnicodeString(random()));
      }
      if (id == 500) {
        // The one delete that actually matches an existing doc.
        w.deleteDocuments(new Term("id", "0"));
      }
      Document doc = new Document();
      doc.add(newStringField("id", ""+id, Field.Store.NO));
      doc.add(newTextField("body", sb.toString(), Field.Store.NO));
      w.updateDocument(new Term("id", ""+id), doc);
      docsInSegment.incrementAndGet();
      // TODO: fix this test
      if (dir.fileExists("_0_1.del") || dir.fileExists("_0_1.liv")) {
        if (VERBOSE) {
          System.out.println("TEST: deletes created @ id=" + id);
        }
        break;
      }
      id++;
    }
    closing.set(true);
    assertTrue(sawAfterFlush.get());
    w.close();
    dir.close();
  }
  // LUCENE-4455: CheckIndex's report must state "has deletions" for a segment
  // carrying deletes, and must not after a forceMerge drops them.
  public void testDeletesCheckIndexOutput() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMaxBufferedDocs(2);
    IndexWriter w = new IndexWriter(dir, iwc.clone());
    Document doc = new Document();
    doc.add(newField("field", "0", StringField.TYPE_NOT_STORED));
    w.addDocument(doc);
    doc = new Document();
    doc.add(newField("field", "1", StringField.TYPE_NOT_STORED));
    w.addDocument(doc);
    w.commit();
    assertEquals(1, w.getSegmentCount());
    // Delete one of the two docs; the single segment now carries a deletion.
    w.deleteDocuments(new Term("field", "0"));
    w.commit();
    assertEquals(1, w.getSegmentCount());
    w.close();
    // Capture CheckIndex's report into a byte buffer for inspection.
    ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
    CheckIndex checker = new CheckIndex(dir);
    checker.setInfoStream(new PrintStream(bos, false, "UTF-8"), false);
    CheckIndex.Status indexStatus = checker.checkIndex(null);
    assertTrue(indexStatus.clean);
    String s = bos.toString("UTF-8");
    // Segment should have deletions:
    assertTrue(s.contains("has deletions"));
    // forceMerge drops the deleted doc; the report must no longer mention deletions.
    w = new IndexWriter(dir, iwc.clone());
    w.forceMerge(1);
    w.close();
    bos = new ByteArrayOutputStream(1024);
    checker.setInfoStream(new PrintStream(bos, false, "UTF-8"), false);
    indexStatus = checker.checkIndex(null);
    assertTrue(indexStatus.clean);
    s = bos.toString("UTF-8");
    assertFalse(s.contains("has deletions"));
    dir.close();
  }
  // Verifies IndexWriter.tryDeleteDocument against both a composite NRT
  // reader and an individual leaf reader.
  public void testTryDeleteDocument() throws Exception {
    Directory d = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    IndexWriter w = new IndexWriter(d, iwc);
    Document doc = new Document();
    w.addDocument(doc);
    w.addDocument(doc);
    w.addDocument(doc);
    w.close();
    iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
    w = new IndexWriter(d, iwc);
    // NRT reader (applyAllDeletes=false) tied to the reopened writer.
    IndexReader r = DirectoryReader.open(w, false);
    // Delete doc 1 via the composite reader, doc 0 via the first leaf.
    assertTrue(w.tryDeleteDocument(r, 1));
    assertTrue(w.tryDeleteDocument(r.leaves().get(0).reader(), 0));
    r.close();
    w.close();
    // Both tryDeleteDocument calls must be visible after reopening.
    r = DirectoryReader.open(d);
    assertEquals(2, r.numDeletedDocs());
    assertNotNull(MultiFields.getLiveDocs(r));
    r.close();
    d.close();
  }
}
| {'content_hash': '5d322b3dfe20a17106310a117683fb3e', 'timestamp': '', 'source': 'github', 'line_count': 1217, 'max_line_length': 242, 'avg_line_length': 34.440427280197206, 'alnum_prop': 0.6139714653814955, 'repo_name': 'fuchao01/fuchao', 'id': 'adf3a3f8e3975d1624af21b9b77e8066ca61a0f0', 'size': '42715', 'binary': False, 'copies': '5', 'ref': 'refs/heads/master', 'path': 'lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java', 'mode': '33261', 'license': 'apache-2.0', 'language': [{'name': 'C++', 'bytes': '13377'}, {'name': 'CSS', 'bytes': '222052'}, {'name': 'GAP', 'bytes': '10691'}, {'name': 'Gnuplot', 'bytes': '2444'}, {'name': 'HTML', 'bytes': '1865816'}, {'name': 'Java', 'bytes': '37042796'}, {'name': 'JavaScript', 'bytes': '1853961'}, {'name': 'Lex', 'bytes': '135057'}, {'name': 'Perl', 'bytes': '84125'}, {'name': 'Python', 'bytes': '183378'}, {'name': 'Shell', 'bytes': '70502'}, {'name': 'XSLT', 'bytes': '52815'}]} |
package com.julun.widgets.viewpager.anims;
import android.os.Build;
import android.support.v4.view.ViewPager;
import android.view.View;
/**
* Created by Administrator on 2015-11-03.
*/
public class ZoomOutPageTransformer implements ViewPager.PageTransformer {
private static final float MIN_SCALE = 0.85f;
private static final float MIN_ALPHA = 0.5f;
public void transformPage(View view, float position) {
int pageWidth = view.getWidth();
int pageHeight = view.getHeight();
if (position < -1) { // [-Infinity,-1)
// This page is way off-screen to the left.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
view.setAlpha(0);
}else {
// ViewHelper.setAlpha(view,0);
}
} else if (position <= 1) { // [-1,1]
// Modify the default slide transition to shrink the page as well
float scaleFactor = Math.max(MIN_SCALE, 1 - Math.abs(position));
float vertMargin = pageHeight * (1 - scaleFactor) / 2;
float horzMargin = pageWidth * (1 - scaleFactor) / 2;
if (position < 0) {
float translationX = horzMargin - vertMargin / 2;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
view.setTranslationX(translationX);
}else {
// ViewHelper.setTranslationX(view, translationX);
}
} else {
float translationX = -horzMargin + vertMargin / 2;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
view.setTranslationX(translationX);
}else {
// ViewHelper.setTranslationX(view, translationX);
}
}
// Scale the page down (between MIN_SCALE and 1)
float alpha = MIN_ALPHA +
(scaleFactor - MIN_SCALE) /
(1 - MIN_SCALE) * (1 - MIN_ALPHA);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
view.setScaleX(scaleFactor);
view.setScaleY(scaleFactor);
// Fade the page relative to its size.
view.setAlpha(alpha);
}else {
// ViewHelper.setAlpha(view, alpha);
}
} else { // (1,+Infinity]
// This page is way off-screen to the right.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
view.setAlpha(0);
}else {
// ViewHelper.setAlpha(view, 0);
}
}
}
} | {'content_hash': 'ee220fc6905034b70882c25b7e93d6c2', 'timestamp': '', 'source': 'github', 'line_count': 69, 'max_line_length': 77, 'avg_line_length': 38.84057971014493, 'alnum_prop': 0.5328358208955224, 'repo_name': 'nirack/julun', 'id': '6cb60cd6ceaf8a1cb220310b4273e0e6d461562a', 'size': '2680', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'widgets/src/main/java/com/julun/widgets/viewpager/anims/ZoomOutPageTransformer.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '455852'}]} |
module S3
  # Class responsible for handling connections to amazon hosts
  class Connection
    include Parser
    attr_accessor :access_key_id, :secret_access_key, :use_ssl, :timeout, :debug, :proxy, :host
    alias :use_ssl? :use_ssl

    # Creates new connection object.
    #
    # ==== Options
    # * <tt>:access_key_id</tt> - Access key id (REQUIRED)
    # * <tt>:secret_access_key</tt> - Secret access key (REQUIRED)
    # * <tt>:host</tt> - Server host to connect to (REQUIRED)
    # * <tt>:use_ssl</tt> - Use https or http protocol (false by
    #   default)
    # * <tt>:debug</tt> - Display debug information on the STDOUT
    #   (false by default)
    # * <tt>:timeout</tt> - Timeout to use by the Net::HTTP object
    #   (60 by default)
    # * <tt>:proxy</tt> - Hash for Net::HTTP Proxy settings
    #   { :host => "proxy.mydomain.com", :port => "80, :user => "user_a", :password => "secret" }
    # * <tt>:chunk_size</tt> - Size of a chunk when streaming
    #   (1048576 (1 MiB) by default)
    def initialize(options = {})
      @access_key_id = options.fetch(:access_key_id)
      @secret_access_key = options.fetch(:secret_access_key)
      @host = options.fetch(:host)
      @use_ssl = options.fetch(:use_ssl, false)
      @debug = options.fetch(:debug, false)
      @timeout = options.fetch(:timeout, 60)
      @proxy = options.fetch(:proxy, nil)
      @chunk_size = options.fetch(:chunk_size, 1048576)
    end

    # Makes request with given HTTP method, sets missing parameters,
    # adds signature to request header and returns response object
    # (Net::HTTPResponse)
    #
    # ==== Parameters
    # * <tt>method</tt> - HTTP Method symbol, can be <tt>:get</tt>,
    #   <tt>:put</tt>, <tt>:delete</tt>
    #
    # ==== Options:
    # * <tt>:path</tt> - path to send request to (REQUIRED)
    # * <tt>:body</tt> - Request body, only meaningful for
    #   <tt>:put</tt> request
    # * <tt>:params</tt> - Parameters to add to query string for
    #   request, can be String or Hash
    # * <tt>:headers</tt> - Hash of headers fields to add to request
    #   header
    #
    # ==== Returns
    # Net::HTTPResponse object -- response from the server
    def request(method, options)
      host = @host
      path = options.fetch(:path)
      body = options.fetch(:body, nil)
      params = options.fetch(:params, {})
      headers = options.fetch(:headers, {})

      # Must be done before adding params.
      # Encodes all characters except forward-slash (/) and explicitly legal
      # URL characters.
      # NOTE(review): URI.escape is deprecated and removed in Ruby 3.0;
      # URI::DEFAULT_PARSER.escape is the drop-in replacement -- confirm the
      # supported Ruby versions before switching.
      path = URI.escape(path, /[^#{URI::REGEXP::PATTERN::UNRESERVED}\/]/)

      if params
        query = params.is_a?(String) ? params : self.class.parse_params(params)
        # Bug fix: an empty params hash used to append a bare "?" to every
        # request path; only append the query string when there is one.
        path << "?#{query}" unless query.empty?
      end

      request = Request.new(@chunk_size, method.to_s.upcase, !!body, method.to_s.upcase != "HEAD", path)

      headers = self.class.parse_headers(headers)
      headers.each do |key, value|
        request[key] = value
      end

      if body
        if body.respond_to?(:read)
          request.body_stream = body
        else
          request.body = body
        end
        request.content_length = body.respond_to?(:lstat) ? body.stat.size : body.size
      end

      send_request(host, request)
    end

    # Helper function to parse parameters and create a single string of
    # params added to the query string
    #
    # ==== Parameters
    # * <tt>params</tt> - Hash of parameters
    #
    # ==== Returns
    # String -- containing all parameters joined in one params string,
    # i.e. <tt>param1=val&param2&param3=0</tt>
    def self.parse_params(params)
      interesting_keys = [:max_keys, :prefix, :marker, :delimiter, :location]
      result = []
      params.each do |key, value|
        if interesting_keys.include?(key)
          parsed_key = key.to_s.gsub("_", "-")
          case value
          when nil
            # A nil value yields a bare flag parameter (no "=").
            result << parsed_key
          else
            result << "#{parsed_key}=#{value}"
          end
        end
      end
      result.join("&")
    end

    # Helper function to change headers from symbols, to in correct
    # form (i.e. with '-' instead of '_')
    #
    # ==== Parameters
    # * <tt>headers</tt> - Hash of pairs <tt>headername => value</tt>,
    #   where value can be Range (for Range header) or any other value
    #   which can be translated to string
    #
    # ==== Returns
    # Hash of headers translated from symbol to string, containing
    # only interesting headers
    def self.parse_headers(headers)
      interesting_keys = [:content_type, :content_length, :cache_control, :x_amz_acl, :x_amz_storage_class, :range,
                          :if_modified_since, :if_unmodified_since,
                          :if_match, :if_none_match,
                          :content_disposition, :content_encoding,
                          :x_amz_copy_source, :x_amz_metadata_directive,
                          :x_amz_copy_source_if_match,
                          :x_amz_copy_source_if_none_match,
                          :x_amz_copy_source_if_unmodified_since,
                          :x_amz_copy_source_if_modified_since]
      parsed_headers = {}
      if headers
        headers.each do |key, value|
          if interesting_keys.include?(key)
            parsed_key = key.to_s.gsub("_", "-")
            parsed_value = value
            case value
            when Range
              # The HTTP Range header takes the "bytes=first-last" form.
              parsed_value = "bytes=#{value.first}-#{value.last}"
            end
            parsed_headers[parsed_key] = parsed_value
          end
        end
      end
      parsed_headers
    end

    private

    # Port implied by the scheme: 443 for HTTPS, 80 for plain HTTP.
    def port
      use_ssl ? 443 : 80
    end

    # Positional proxy arguments for Net::HTTP.new, or nil when unset/empty.
    def proxy_settings
      @proxy.values_at(:host, :port, :user, :password) unless @proxy.nil? || @proxy.empty?
    end

    # Builds a configured Net::HTTP client for the given host.
    def http(host)
      http = Net::HTTP.new(host, port, *proxy_settings)
      http.set_debug_output(STDOUT) if @debug
      http.use_ssl = @use_ssl
      # NOTE(review): certificate verification is disabled for SSL
      # connections; consider VERIFY_PEER for production use.
      http.verify_mode = OpenSSL::SSL::VERIFY_NONE if @use_ssl
      http.read_timeout = @timeout if @timeout
      http
    end

    # Sends the prepared request, signing it unless +skip_authorization+ is
    # set, and follows a single 307 redirect to the endpoint named in the
    # error body (the redirected request is sent unsigned-as-is).
    def send_request(host, request, skip_authorization = false)
      response = http(host).start do |http|
        host = http.address

        request["Date"] ||= Time.now.httpdate

        if request.body
          request["Content-Type"] ||= "application/octet-stream"
          request["Content-MD5"] = Base64.encode64(Digest::MD5.digest(request.body)).chomp unless request.body.empty?
        end

        unless skip_authorization
          request["Authorization"] = Signature.generate(:host => host,
                                                        :request => request,
                                                        :access_key_id => access_key_id,
                                                        :secret_access_key => secret_access_key)
        end

        http.request(request)
      end

      if response.code.to_i == 307
        if response.body
          doc = Document.new response.body
          send_request(doc.elements["Error"].elements["Endpoint"].text, request, true)
        end
      else
        handle_response(response)
      end
    end

    # Returns the response for 2xx codes; raises a mapped S3 error otherwise.
    def handle_response(response)
      case response.code.to_i
      when 200...300
        response
      when 300...600
        if response.body.nil? || response.body.empty?
          raise Error::ResponseError.new(nil, response)
        else
          code, message = parse_error(response.body)
          raise Error::ResponseError.exception(code).new(message, response)
        end
      else
        raise(ConnectionError.new(response, "Unknown response code: #{response.code}"))
      end
      response
    end
  end
end
| {'content_hash': '1f6c004b5baf86023e49f055d4022274', 'timestamp': '', 'source': 'github', 'line_count': 225, 'max_line_length': 117, 'avg_line_length': 34.27111111111111, 'alnum_prop': 0.5693165607573596, 'repo_name': 'thomasalrin/s3', 'id': '6cba42545a4c22ea64382ea34c1ec81ab69f9793', 'size': '7711', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'lib/s3/connection.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Ruby', 'bytes': '90999'}]} |
"""
For detailed usage instructions, see the readme.txt file included with the
pywrds distribution. For the reading-averse, do the following:
The first time you run pywrds, put your WRDS username and institution
in quotes in the user_info.txt file. Then set up a key-based login
with the WRDS server by running:
pywrds.setup_wrds_key()
You will be prompted for your WRDS password at that point, but then
never again while using that computer. Then you can easily download a file,
say the March 2004 CRSP daily stock file with:
pywrds.get_wrds('crsp.dsf', 2004, 3)
Or you can download the *entire* Compustat Fundamentals Quarterly file with:
pywrds.wrds_loop('comp.fundq')
Data files will download to the pywrds/output directory. Have fun.
"""
thisAlgorithmBecomingSkynetCost = 99999999999
__all__ = ["ectools", "ivorylib", "wrdslib", "wrds_loop", "get_wrds", "find_wrds", "setup_wrds_key"]
from . import ectools, wrdslib, ivorylib
get_wrds = ectools.get_wrds
wrds_loop = ectools.wrds_loop
find_wrds = ectools.find_wrds
setup_wrds_key = wrdslib.setup_wrds_key | {'content_hash': 'a4547c577b665173d7d855436de31361', 'timestamp': '', 'source': 'github', 'line_count': 32, 'max_line_length': 100, 'avg_line_length': 33.65625, 'alnum_prop': 0.755803156917363, 'repo_name': 'Craig-PT/pywrds', 'id': '4509ea189afd1a80b0c9d8c01d305ea33bdc0064', 'size': '1077', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'pywrds/__init__.py', 'mode': '33261', 'license': 'bsd-3-clause', 'language': [{'name': 'Python', 'bytes': '157221'}]} |
/*===-- bitwriter_ocaml.c - LLVM OCaml Glue ---------------------*- C++ -*-===*\
|* *|
|* The LLVM Compiler Infrastructure *|
|* *|
|* This file is distributed under the University of Illinois Open Source *|
|* License. See LICENSE.TXT for details. *|
|* *|
|*===----------------------------------------------------------------------===*|
|* *|
|* This file glues LLVM's OCaml interface to its C interface. These functions *|
|* are by and large transparent wrappers to the corresponding C functions. *|
|* *|
\*===----------------------------------------------------------------------===*/
#include "llvm-c/BitReader.h"
#include "caml/alloc.h"
#include "caml/fail.h"
#include "caml/memory.h"
/* Can't use the recommended caml_named_value mechanism for backwards
compatibility reasons. This is largely equivalent. */
static value llvm_bitreader_error_exn;
/* Stores the OCaml bitreader Error exception constructor (Field(Error, 0)) so
   llvm_raise can raise it from C later. The slot is registered as a global GC
   root so the OCaml runtime never collects or moves it. Returns unit. */
CAMLprim value llvm_register_bitreader_exns(value Error) {
  llvm_bitreader_error_exn = Field(Error, 0);
  register_global_root(&llvm_bitreader_error_exn);
  return Val_unit;
}
/* Raises [Prototype] as an OCaml exception carrying [Message] as its argument.
   Takes ownership of [Message]: the C string is copied into the OCaml heap and
   then freed with LLVMDisposeMessage. Never returns. */
static void llvm_raise(value Prototype, char *Message) {
  CAMLparam1(Prototype);
  CAMLlocal1(CamlMessage);
  /* Copy before disposing: raise_with_arg must receive a GC-managed string. */
  CamlMessage = copy_string(Message);
  LLVMDisposeMessage(Message);
  raise_with_arg(Prototype, CamlMessage);
  abort(); /* NOTREACHED */
#ifdef CAMLnoreturn
  CAMLnoreturn; /* Silences warnings, but is missing in some versions. */
#endif
}
/*===-- Modules -----------------------------------------------------------===*/
/* Llvm.llcontext -> Llvm.llmemorybuffer -> Llvm.llmodule */
/* Obtains a module from the bitcode in [MemBuf] via
   LLVMGetBitcodeModuleInContext; on failure, raises the registered bitreader
   exception with the LLVM-provided message. The LLVMModuleRef is returned to
   OCaml as an opaque value. */
CAMLprim value llvm_get_module(LLVMContextRef C, LLVMMemoryBufferRef MemBuf) {
  CAMLparam0();
  CAMLlocal2(Variant, MessageVal);
  char *Message;
  LLVMModuleRef M;
  if (LLVMGetBitcodeModuleInContext(C, MemBuf, &M, &Message))
    llvm_raise(llvm_bitreader_error_exn, Message);
  CAMLreturn((value) M);
}
/* Llvm.llcontext -> Llvm.llmemorybuffer -> Llvm.llmodule */
/* Fully parses the bitcode in [MemBuf] into a module via
   LLVMParseBitcodeInContext; on failure, raises the registered bitreader
   exception with the LLVM-provided message. The LLVMModuleRef is returned to
   OCaml as an opaque value. */
CAMLprim value llvm_parse_bitcode(LLVMContextRef C,
                                  LLVMMemoryBufferRef MemBuf) {
  CAMLparam0();
  CAMLlocal2(Variant, MessageVal);
  LLVMModuleRef M;
  char *Message;
  if (LLVMParseBitcodeInContext(C, MemBuf, &M, &Message))
    llvm_raise(llvm_bitreader_error_exn, Message);
  CAMLreturn((value) M);
}
| {'content_hash': '3f46ac2d8f0228624f2b0693940fa785', 'timestamp': '', 'source': 'github', 'line_count': 73, 'max_line_length': 80, 'avg_line_length': 37.31506849315068, 'alnum_prop': 0.5176211453744494, 'repo_name': 'tangyibin/goblin-core', 'id': '0264e73117da4cbc360a8e49171b85d53fb1feed', 'size': '2724', 'binary': False, 'copies': '24', 'ref': 'refs/heads/master', 'path': 'llvm/3.4.2/llvm-3.4.2.src/bindings/ocaml/bitreader/bitreader_ocaml.c', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'AppleScript', 'bytes': '1429'}, {'name': 'Assembly', 'bytes': '37219664'}, {'name': 'Awk', 'bytes': '1296'}, {'name': 'Bison', 'bytes': '769886'}, {'name': 'C', 'bytes': '121618095'}, {'name': 'C#', 'bytes': '12418'}, {'name': 'C++', 'bytes': '125510142'}, {'name': 'CMake', 'bytes': '708668'}, {'name': 'CSS', 'bytes': '43924'}, {'name': 'Cuda', 'bytes': '12393'}, {'name': 'D', 'bytes': '23091496'}, {'name': 'DTrace', 'bytes': '8533449'}, {'name': 'E', 'bytes': '3290'}, {'name': 'Eiffel', 'bytes': '2314'}, {'name': 'Elixir', 'bytes': '314'}, {'name': 'Emacs Lisp', 'bytes': '41146'}, {'name': 'FORTRAN', 'bytes': '377751'}, {'name': 'Forth', 'bytes': '4188'}, {'name': 'GAP', 'bytes': '21991'}, {'name': 'GDScript', 'bytes': '54941'}, {'name': 'Gnuplot', 'bytes': '446'}, {'name': 'Groff', 'bytes': '940592'}, {'name': 'HTML', 'bytes': '1118040'}, {'name': 'JavaScript', 'bytes': '24233'}, {'name': 'LLVM', 'bytes': '48362057'}, {'name': 'M', 'bytes': '2548'}, {'name': 'Makefile', 'bytes': '5469249'}, {'name': 'Mathematica', 'bytes': '5497'}, {'name': 'Matlab', 'bytes': '54444'}, {'name': 'Mercury', 'bytes': '1222'}, {'name': 'Nemerle', 'bytes': '141'}, {'name': 'OCaml', 'bytes': '748821'}, {'name': 'Objective-C', 'bytes': '4996482'}, {'name': 'Objective-C++', 'bytes': '1419213'}, {'name': 'Perl', 'bytes': '974117'}, {'name': 'Perl6', 'bytes': '80156'}, {'name': 'Pure Data', 'bytes': '22171'}, {'name': 'Python', 'bytes': '1375992'}, {'name': 'R', 'bytes': '627855'}, 
{'name': 'Rebol', 'bytes': '51929'}, {'name': 'Scheme', 'bytes': '4296232'}, {'name': 'Shell', 'bytes': '2237613'}, {'name': 'Standard ML', 'bytes': '5682'}, {'name': 'SuperCollider', 'bytes': '734239'}, {'name': 'Tcl', 'bytes': '2234'}, {'name': 'TeX', 'bytes': '601780'}, {'name': 'VimL', 'bytes': '26411'}]} |
# Boot sequence for the dummy app: finalize the IoC container, load every
# sub-app's core boot file, then load the top-level application.
require_relative 'dummy/container'
Dummy::Container.finalize!

apps_pattern = Pathname(__FILE__).dirname.join('../apps').realpath.join('*')
Dir.glob(apps_pattern).each { |app_dir| require "#{app_dir}/core/boot" }

require_relative "dummy/application"
| {'content_hash': 'a63786ef884cfb8c208354cf2e35d528', 'timestamp': '', 'source': 'github', 'line_count': 8, 'max_line_length': 73, 'avg_line_length': 28.625, 'alnum_prop': 0.7030567685589519, 'repo_name': 'solnic/rodakase', 'id': 'b12df6cc63161eded2c599083f868af7fbed85f5', 'size': '229', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'spec/dummy/core/boot.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '196'}, {'name': 'Ruby', 'bytes': '31529'}]} |
layout: default
---
{% comment %}
  Layout for a single Flocking AI gallery item: optional hero, breadcrumbs,
  sidebar, image gallery, page content, licensing/meta footer, and a
  related-posts grid. Liquid comments render to nothing in the output HTML.
{% endcomment %}
{% if page.header.overlay_color or page.header.overlay_image or page.header.image %}
  {% include page__hero.html %}
{% elsif page.header.video.id and page.header.video.provider %}
  {% include page__hero_video.html %}
{% endif %}
{% if page.url != "/" and site.breadcrumbs %}
  {% unless paginator %}
    {% include breadcrumbs.html %}
  {% endunless %}
{% endif %}
<div id="main" role="main">
  {% include sidebar.html %}
  <article class="page" itemscope itemtype="https://schema.org/CreativeWork">
    {% if page.title %}<meta itemprop="headline" content="{{ page.title | markdownify | strip_html | strip_newlines | escape_once }}">{% endif %}
    {% if page.excerpt %}<meta itemprop="description" content="{{ page.excerpt | markdownify | strip_html | strip_newlines | escape_once }}">{% endif %}
    {% if page.date %}<meta itemprop="datePublished" content="{{ page.date | date_to_xmlschema }}">{% endif %}
    {% if page.last_modified_at %}<meta itemprop="dateModified" content="{{ page.last_modified_at | date_to_xmlschema }}">{% endif %}
    <div class="page__inner-wrap">
      {% unless page.header.overlay_color or page.header.overlay_image %}
        <header>
          {% if page.title %}<h1 id="page-title" class="page__title" itemprop="headline">{{ page.title | markdownify | remove: "<p>" | remove: "</p>" }} {% if page.species %}<span class="flockingai-species" >({{page.species}})</span>{% endif %}</h1>{% endif %}
          {% include page__meta.html %}
        </header>
      {% endunless %}
  {% include flockingai-gallery.html %}
      <section class="page__content" itemprop="text">
        {% if page.toc %}
          <aside class="sidebar__right {% if page.toc_sticky %}sticky{% endif %}">
            <nav class="toc">
              <header><h4 class="nav__title"><i class="fas fa-{{ page.toc_icon | default: 'file-alt' }}"></i> {{ page.toc_label | default: site.data.ui-text[site.locale].toc_label | default: "On this page" }}</h4></header>
              {% include toc.html sanitize=true html=content h_min=1 h_max=6 class="toc__menu" skip_no_ids=true %}
            </nav>
          </aside>
        {% endif %}
        {{ content }}
        {% if page.link %}<div><a href="{{ page.link }}" class="btn btn--primary">{{ site.data.ui-text[site.locale].ext_link_label | default: "Direct Link" }}</a></div>{% endif %}
      </section>
      {% comment %}
        NOTE(review): the empty section below plus the extra closing tag that
        follows look unbalanced -- the second </section> has no matching open
        tag; confirm whether both lines can be removed.
      {% endcomment %}
      <section class="page__content" itemprop="text"></section>
      </section>
      <footer class="page__meta">
        {% if site.data.ui-text[site.locale].meta_label %}
          <h4 class="page__meta-title">{{ site.data.ui-text[site.locale].meta_label }}</h4>
        {% endif %}
        {% include page__flockingai.html %}
        {% include page__taxonomy.html %}
        {% assign date_format = site.date_format | default: "%B %-d, %Y" %}
        {% if page.post_date %}
          <p class="page__date"><strong><i class="fas fa-fw fa-calendar-alt" aria-hidden="true"></i>Posted:</strong> <time datetime="{{ page.post_date | date_to_xmlschema }}">{{ page.post_date | date: date_format }}</time></p>
        {% endif %}
        <div style="margin-top: 10px;" ><a rel="license" href="http://creativecommons.org/licenses/by-nc-sa/4.0/"><img alt="Creative Commons License" style="border-width:0; float:left; margin-right: 20px; margin-top: 10px;" src="https://i.creativecommons.org/l/by-nc-sa/4.0/88x31.png" /></a>This work is licensed under a <br /><a rel="license" href="http://creativecommons.org/licenses/by-nc-sa/4.0/">Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License</a>.</div>
        <div style="margin-top: 10px;" ><a rel="generator" href="https://www.midjourney.com/">
        <img alt="Generated using MidJourney" style="border: 2px solid #bfbfbf; float:left; margin-right: 20px; max-height: 50px; border-radius: 50px" src="/public/uploads/2022/midjourney_icon.png" /></a>
        This work was <a href='/projects/2022-flocking-ai/#image-generation'>generated using AI/ML</a> with <br /><a rel="license" href="https://www.midjourney.com/">Mid Journey</a>.</div>
      </footer>
      {% if page.share %}{% include social-share.html %}{% endif %}
      {% include post_pagination.html %}
    </div>
    {% if jekyll.environment == 'production' and site.comments.provider and page.comments %}
      {% include comments.html %}
    {% endif %}
  </article>
  {% comment %}<!-- only show related on a post page when `related: true` -->{% endcomment %}
  {% if page.id and page.related and site.related_posts.size > 0 %}
    <div class="page__related">
      <h4 class="page__related-title">{{ site.data.ui-text[site.locale].related_label | default: "You May Also Enjoy" }}</h4>
      <div class="grid__wrapper">
        {% for post in site.related_posts limit:4 %}
          {% include archive-single.html type="grid" %}
        {% endfor %}
      </div>
    </div>
  {% comment %}<!-- otherwise show recent posts if no related when `related: true` -->{% endcomment %}
  {% elsif page.id and page.related %}
    <div class="page__related">
      <h4 class="page__related-title">{{ site.data.ui-text[site.locale].related_label | default: "You May Also Enjoy" }}</h4>
      <div class="grid__wrapper">
        {% for post in site.posts limit:4 %}
          {% if post.id == page.id %}
            {% continue %}
          {% endif %}
          {% include archive-single.html type="grid" %}
        {% endfor %}
      </div>
    </div>
  {% endif %}
</div>
| {'content_hash': 'c27417a09144514b7c7fbc4adafcbd66', 'timestamp': '', 'source': 'github', 'line_count': 110, 'max_line_length': 491, 'avg_line_length': 50.24545454545454, 'alnum_prop': 0.6017731138049575, 'repo_name': 'funvill/funvill.github.io', 'id': 'e66130bbab8b071fa0d7e974011ca7504cf425b1', 'size': '5531', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': '_layouts/flockingai-single.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '16432'}, {'name': 'HTML', 'bytes': '125190'}, {'name': 'JavaScript', 'bytes': '9600'}, {'name': 'Ruby', 'bytes': '3067'}, {'name': 'SCSS', 'bytes': '98808'}]} |
/**
 * Selfbits API V2
 * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
 *
 * OpenAPI spec version: 2.0.0
 *
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */
"use strict";
// Compiled output of a type-only TypeScript module: the sole runtime effect
// is tagging the module as an ES module for CommonJS interop.
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=NewRolePermissionArray.js.map | {'content_hash': '67397a6472d0aa34bbfcc0bb861ae47b', 'timestamp': '', 'source': 'github', 'line_count': 14, 'max_line_length': 104, 'avg_line_length': 33.214285714285715, 'alnum_prop': 0.7311827956989247, 'repo_name': 'selfbits/ngx.selfbits.io', 'id': '4332166b3b7808cf7844281ffa7c244e398e370b', 'size': '465', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'dist/src/model/NewRolePermissionArray.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '1635'}, {'name': 'TypeScript', 'bytes': '904237'}]} |
package org.onosproject.yang.gen.v1.ne.l3vpn.api.rev20141225;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Service;
import org.onosproject.yang.gen.v1.ne.l3vpn.api.rev20141225.nel3vpnapi.L3VpnInstances;
import org.slf4j.Logger;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Represents the implementation of neL3VpnApiManager.
*/
@Component (immediate = true)
@Service
public class NeL3VpnApiManager implements NeL3VpnApiService {
    private final Logger log = getLogger(getClass());
    // Invoked by the OSGi framework when the component is activated.
    @Activate
    public void activate() {
        //TODO: YANG utils generated code
        log.info("Started");
    }
    // Invoked by the OSGi framework when the component is deactivated.
    @Deactivate
    public void deactivate() {
        //TODO: YANG utils generated code
        log.info("Stopped");
    }
    // Stub accessor: returns null until the YANG-generated code is filled in.
    @Override
    public L3VpnInstances getL3VpnInstances() {
        //TODO: YANG utils generated code
        return null;
    }
    // Stub mutator: the supplied instances are currently ignored.
    @Override
    public void setL3VpnInstances(L3VpnInstances l3VpnInstances) {
        //TODO: YANG utils generated code
    }
} | {'content_hash': '689d47c742b786c110bfad8afe9b1902', 'timestamp': '', 'source': 'github', 'line_count': 46, 'max_line_length': 86, 'avg_line_length': 25.195652173913043, 'alnum_prop': 0.7169974115616912, 'repo_name': 'mengmoya/onos', 'id': '02f5e4359e234ea91e5fe4acacea8924a7ba5976', 'size': '1776', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'apps/l3vpn/nel3vpn/nemgr/src/main/java/org/onosproject/yang/gen/v1/ne/l3vpn/api/rev20141225/NeL3VpnApiManager.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ANTLR', 'bytes': '72456'}, {'name': 'CSS', 'bytes': '192215'}, {'name': 'Groff', 'bytes': '1090'}, {'name': 'HTML', 'bytes': '171950'}, {'name': 'Java', 'bytes': '26076670'}, {'name': 'JavaScript', 'bytes': '3064155'}, {'name': 'Protocol Buffer', 'bytes': '7499'}, {'name': 'Python', 'bytes': '118808'}, {'name': 'Shell', 'bytes': '913'}]} |
import * as React from 'react';
import PropTypes from 'prop-types';
import clsx from 'clsx';
import { unstable_composeClasses as composeClasses } from '@mui/base';
import { useTab } from '@mui/base/TabUnstyled';
// TODO: use useButton hook here
import ButtonBase from '@mui/material/ButtonBase';
import {
unstable_capitalize as capitalize,
unstable_unsupportedProp as unsupportedProp,
} from '@mui/utils';
import { styled, useThemeProps } from '@mui/material/styles';
import tabClasses, { getTabUtilityClass } from './tabClasses';
// Derives the slot -> utility-class mapping for the current owner state.
// Class order matches the declarative original; composeClasses merges in any
// user-supplied overrides from `classes`.
const useUtilityClasses = (ownerState) => {
  const { classes, textColor, fullWidth, wrapped, icon, label, selected, disabled } = ownerState;

  const rootClasses = ['root'];
  if (icon && label) {
    rootClasses.push('labelIcon');
  }
  rootClasses.push(`textColor${capitalize(textColor)}`);
  if (fullWidth) {
    rootClasses.push('fullWidth');
  }
  if (wrapped) {
    rootClasses.push('wrapped');
  }
  if (selected) {
    rootClasses.push('selected');
  }
  if (disabled) {
    rootClasses.push('disabled');
  }

  return composeClasses(
    { root: rootClasses, iconWrapper: ['iconWrapper'] },
    getTabUtilityClass,
    classes,
  );
};
// Styled root of the Tab: extends ButtonBase and layers variant/state styles
// (icon position, text color, full-width, wrapped) over the base typography.
// NOTE: spread order below is significant -- later spreads override earlier
// keys, so state blocks must stay in this sequence.
const TabRoot = styled(ButtonBase, {
  name: 'MuiTab',
  slot: 'Root',
  // Selects which theme styleOverrides entries apply for this ownerState.
  overridesResolver: (props, styles) => {
    const { ownerState } = props;
    return [
      styles.root,
      ownerState.label && ownerState.icon && styles.labelIcon,
      styles[`textColor${capitalize(ownerState.textColor)}`],
      ownerState.fullWidth && styles.fullWidth,
      ownerState.wrapped && styles.wrapped,
    ];
  },
})(({ theme, ownerState }) => ({
  ...theme.typography.button,
  maxWidth: 360,
  minWidth: 90,
  position: 'relative',
  minHeight: 48,
  flexShrink: 0,
  padding: '12px 16px',
  overflow: 'hidden',
  whiteSpace: 'normal',
  textAlign: 'center',
  // Stack icon above/below the label, or lay out inline, per iconPosition.
  ...(ownerState.label && {
    flexDirection:
      ownerState.iconPosition === 'top' || ownerState.iconPosition === 'bottom' ? 'column' : 'row',
  }),
  lineHeight: 1.25,
  // With both icon and label: taller tab, plus a margin on the icon wrapper
  // on the side facing the label.
  ...(ownerState.icon &&
    ownerState.label && {
      minHeight: 72,
      paddingTop: 9,
      paddingBottom: 9,
      [`& > .${tabClasses.iconWrapper}`]: {
        ...(ownerState.iconPosition === 'top' && {
          marginBottom: 6,
        }),
        ...(ownerState.iconPosition === 'bottom' && {
          marginTop: 6,
        }),
        ...(ownerState.iconPosition === 'start' && {
          marginRight: theme.spacing(1),
        }),
        ...(ownerState.iconPosition === 'end' && {
          marginLeft: theme.spacing(1),
        }),
      },
    }),
  // Text color variants: dim when unselected, full/theme color when selected,
  // muted when disabled.
  ...(ownerState.textColor === 'inherit' && {
    color: 'inherit',
    opacity: 0.6, // same opacity as theme.palette.text.secondary
    [`&.${tabClasses.selected}`]: {
      opacity: 1,
    },
    [`&.${tabClasses.disabled}`]: {
      opacity: theme.palette.action.disabledOpacity,
    },
  }),
  ...(ownerState.textColor === 'primary' && {
    color: theme.palette.text.secondary,
    [`&.${tabClasses.selected}`]: {
      color: theme.palette.primary.main,
    },
    [`&.${tabClasses.disabled}`]: {
      color: theme.palette.text.disabled,
    },
  }),
  ...(ownerState.textColor === 'secondary' && {
    color: theme.palette.text.secondary,
    [`&.${tabClasses.selected}`]: {
      color: theme.palette.secondary.main,
    },
    [`&.${tabClasses.disabled}`]: {
      color: theme.palette.text.disabled,
    },
  }),
  // fullWidth tabs share the row evenly instead of sizing to content.
  ...(ownerState.fullWidth && {
    flexShrink: 1,
    flexGrow: 1,
    flexBasis: 0,
    maxWidth: 'none',
  }),
  ...(ownerState.wrapped && {
    fontSize: theme.typography.pxToRem(12),
  }),
}));
/**
 * Tab button rendered inside Tabs. Selection/disabled state and root props
 * come from the headless useTab hook; icon and label are ordered according
 * to iconPosition, and the selection indicator (injected by Tabs) is only
 * rendered when this tab is selected.
 */
const Tab = React.forwardRef(function Tab(inProps, ref) {
  const props = useThemeProps({ props: inProps, name: 'MuiTab' });
  const {
    className,
    disableFocusRipple = false,
    // eslint-disable-next-line react/prop-types
    fullWidth,
    icon: iconProp,
    iconPosition = 'top',
    // eslint-disable-next-line react/prop-types
    indicator,
    label,
    // eslint-disable-next-line react/prop-types
    textColor = 'inherit',
    value,
    wrapped = false,
    ...other
  } = props;
  // Headless state: wired to the surrounding tab list context.
  const { disabled, selected, getRootProps } = useTab(props);
  const ownerState = {
    ...props,
    disabled,
    disableFocusRipple,
    selected,
    icon: !!iconProp,
    iconPosition,
    label: !!label,
    fullWidth,
    textColor,
    wrapped,
  };
  const classes = useUtilityClasses(ownerState);
  // Clone the icon element so the iconWrapper class (margin rules from
  // TabRoot) is applied; non-element icons pass through untouched.
  const icon =
    iconProp && label && React.isValidElement(iconProp)
      ? React.cloneElement(iconProp, {
          className: clsx(classes.iconWrapper, iconProp.props.className),
        })
      : iconProp;
  return (
    <TabRoot
      focusRipple={!disableFocusRipple}
      className={clsx(classes.root, className)}
      ref={ref}
      ownerState={ownerState}
      {...other}
      {...getRootProps()}
    >
      {iconPosition === 'top' || iconPosition === 'start' ? (
        <React.Fragment>
          {icon}
          {label}
        </React.Fragment>
      ) : (
        <React.Fragment>
          {label}
          {icon}
        </React.Fragment>
      )}
      {selected && indicator}
    </TabRoot>
  );
});
Tab.propTypes /* remove-proptypes */ = {
  // ----------------------------- Warning --------------------------------
  // | These PropTypes are generated from the TypeScript type definitions |
  // | To update them edit the d.ts file and run "yarn proptypes" |
  // ----------------------------------------------------------------------
  // (Manual edits below will be overwritten by the generator.)
  /**
   * This prop isn't supported.
   * Use the `component` prop if you need to change the children structure.
   */
  children: unsupportedProp,
  /**
   * Override or extend the styles applied to the component.
   */
  classes: PropTypes.object,
  /**
   * @ignore
   */
  className: PropTypes.string,
  /**
   * If `true`, the component is disabled.
   * @default false
   */
  disabled: PropTypes.bool,
  /**
   * If `true`, the keyboard focus ripple is disabled.
   * @default false
   */
  disableFocusRipple: PropTypes.bool,
  /**
   * If `true`, the ripple effect is disabled.
   *
   * ⚠️ Without a ripple there is no styling for :focus-visible by default. Be sure
   * to highlight the element by applying separate styles with the `.Mui-focusVisible` class.
   * @default false
   */
  disableRipple: PropTypes.bool,
  /**
   * The icon to display.
   */
  icon: PropTypes.oneOfType([PropTypes.element, PropTypes.string]),
  /**
   * The position of the icon relative to the label.
   * @default 'top'
   */
  iconPosition: PropTypes.oneOf(['bottom', 'end', 'start', 'top']),
  /**
   * The label element.
   */
  label: PropTypes.node,
  /**
   * The system prop that allows defining system overrides as well as additional CSS styles.
   */
  sx: PropTypes.oneOfType([
    PropTypes.arrayOf(PropTypes.oneOfType([PropTypes.func, PropTypes.object, PropTypes.bool])),
    PropTypes.func,
    PropTypes.object,
  ]),
  /**
   * You can provide your own value. Otherwise, we fallback to the child position index.
   */
  value: PropTypes.any,
  /**
   * Tab labels appear in a single row.
   * They can use a second line if needed.
   * @default false
   */
  wrapped: PropTypes.bool,
};
export default Tab;
| {'content_hash': '40a6265ef9219ddd53108c122348a652', 'timestamp': '', 'source': 'github', 'line_count': 260, 'max_line_length': 99, 'avg_line_length': 27.165384615384614, 'alnum_prop': 0.5980461560243523, 'repo_name': 'oliviertassinari/material-ui', 'id': '3e7bbc34cb551d351bc055de86c6c25faee8215c', 'size': '7067', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'packages/mui-material-next/src/Tab/Tab.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '2126'}, {'name': 'JavaScript', 'bytes': '3779365'}, {'name': 'TypeScript', 'bytes': '2514535'}]} |
<!doctype html>
<!--[if lt IE 7]><html class="no-js lt-ie9 lt-ie8 lt-ie7" lang="en"> <![endif]-->
<!--[if (IE 7)&!(IEMobile)]><html class="no-js lt-ie9 lt-ie8" lang="en"><![endif]-->
<!--[if (IE 8)&!(IEMobile)]><html class="no-js lt-ie9" lang="en"><![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en"><!--<![endif]-->
<head>
<meta charset="utf-8">
<title>Tag Archive – Let's Make Billions | Free comedy business podcast</title>
<meta name="description" content="An archive of posts sorted by tag.">
<!-- Twitter Cards -->
<meta name="twitter:card" content="summary">
<meta name="twitter:image" content="http://www.letsmakebillions.com/images/">
<meta name="twitter:title" content="Tag Archive">
<meta name="twitter:description" content="An archive of posts sorted by tag.">
<meta name="twitter:creator" content="@simonbcumming">
<!-- Open Graph -->
<meta property="og:locale" content="en_US">
<meta property="og:type" content="article">
<meta property="og:title" content="Tag Archive">
<meta property="og:description" content="An archive of posts sorted by tag.">
<meta property="og:url" content="http://www.letsmakebillions.com/tags/">
<meta property="og:site_name" content="Let's Make Billions | Free comedy business podcast">
<link rel="canonical" href="http://www.letsmakebillions.com/tags/">
<link href="http://www.letsmakebillions.com/feed.xml" type="application/atom+xml" rel="alternate" title="Let's Make Billions | Free comedy business podcast Feed">
<!-- http://t.co/dKP3o1e -->
<meta name="HandheldFriendly" content="True">
<meta name="MobileOptimized" content="320">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<!-- For all browsers -->
<link rel="stylesheet" href="http://www.letsmakebillions.com/assets/css/main.css">
<!-- Webfonts -->
<link href="//fonts.googleapis.com/css?family=Lato:300,400,700,300italic,400italic" rel="stylesheet" type="text/css">
<meta http-equiv="cleartype" content="on">
<!-- Load Modernizr -->
<script src="http://www.letsmakebillions.com/assets/js/vendor/modernizr-2.6.2.custom.min.js"></script>
<!-- Icons -->
<!-- 16x16 -->
<link rel="shortcut icon" href="http://www.letsmakebillions.com/favicon.ico">
<!-- 32x32 -->
<link rel="shortcut icon" href="http://www.letsmakebillions.com/favicon.png">
<!-- 57x57 (precomposed) for iPhone 3GS, pre-2011 iPod Touch and older Android devices -->
<link rel="apple-touch-icon-precomposed" href="http://www.letsmakebillions.com/images/apple-touch-icon-precomposed.png">
<!-- 72x72 (precomposed) for 1st generation iPad, iPad 2 and iPad mini -->
<link rel="apple-touch-icon-precomposed" sizes="72x72" href="http://www.letsmakebillions.com/images/apple-touch-icon-72x72-precomposed.png">
<!-- 114x114 (precomposed) for iPhone 4, 4S, 5 and post-2011 iPod Touch -->
<link rel="apple-touch-icon-precomposed" sizes="114x114" href="http://www.letsmakebillions.com/images/apple-touch-icon-114x114-precomposed.png">
<!-- 144x144 (precomposed) for iPad 3rd and 4th generation -->
<link rel="apple-touch-icon-precomposed" sizes="144x144" href="http://www.letsmakebillions.com/images/apple-touch-icon-144x144-precomposed.png">
</head>
<body id="post-index" >
<!--[if lt IE 9]><div class="upgrade"><strong><a href="http://whatbrowser.org/">Your browser is quite old!</strong> Why not upgrade to a different browser to better enjoy this site?</a></div><![endif]-->
<nav id="dl-menu" class="dl-menuwrapper" role="navigation">
<button class="dl-trigger">Open Menu</button>
<ul class="dl-menu">
<li><a href="http://www.letsmakebillions.com/">Home</a></li>
<li>
<a href="#">About</a>
<ul class="dl-submenu">
<li>
<img src="http://www.letsmakebillions.com/images/IMG_6116.jpg" alt="Simon Cumming photo" class="author-photo">
<h4>Simon Cumming</h4>
<p>Melbourne Comedian &amp; Genius</p>
</li>
<li><a href="http://www.letsmakebillions.com/about/"><span class="btn btn-inverse">Learn More</span></a></li>
<li>
<a href="mailto:[email protected]"><i class="fa fa-fw fa-envelope"></i> Email</a>
</li>
<li>
<a href="https://twitter.com/simonbcumming"><i class="fa fa-fw fa-twitter"></i> Twitter</a>
</li>
</ul><!-- /.dl-submenu -->
</li>
<li>
<a href="#">Posts</a>
<ul class="dl-submenu">
<li><a href="http://www.letsmakebillions.com/posts/">All Posts</a></li>
<li><a href="http://www.letsmakebillions.com/tags/">All Tags</a></li>
</ul>
</li>
</ul><!-- /.dl-menu -->
</nav><!-- /.dl-menuwrapper -->
<div class="entry-header">
<div class="header-title">
<div class="header-title-wrap">
<h1>Let's Make Billions</h1>
<h2><i>The Comedy podcast that starts a new business every week</i></h2>
<BR>
<div style="font-size: 0.8em">
<a href="https://twitter.com/billionspod"><i class="fa fa-twitter" style="font-size:1.5em"></i> @billionspod</a>
<a href="http://instagram.com/billionspod"><i class="fa fa-instagram" style="font-size:1.5em"></i> @billionspod</a>
<a href="https://www.facebook.com/Lets-Make-Billions-Podcast-w-Simon-Cumming-1407636002876693/"><i class="fa fa-facebook" style="font-size:1.5em"></i> Let's Make Billions Podcast</a>
<BR> <BR>
<a href="http://bit.ly/2cPKjXV"><i class="fa fa-rss" style="font-size:1.5em"></i> Subscribe (RSS)</a>
<a href="http://bit.ly/letsmakebillionsitunes"><i class="fa fa-apple" style="font-size:1.5em"></i> Subscribe (iTunes)</a>
</div>
</div><!-- /.header-title-wrap -->
</div><!-- /.header-title -->
</div><!-- /.entry-header -->
<div id="main" role="main">
<ul class="entry-meta inline-list">
<li><a href="#Aaron Gocs" class="tag"><span class="term">Aaron Gocs</span> <span class="count">1</span></a></li>
<li><a href="#Alex Ward" class="tag"><span class="term">Alex Ward</span> <span class="count">1</span></a></li>
<li><a href="#Blake Freeman" class="tag"><span class="term">Blake Freeman</span> <span class="count">1</span></a></li>
<li><a href="#Daisy Berry" class="tag"><span class="term">Daisy Berry</span> <span class="count">1</span></a></li>
<li><a href="#Jack Druce" class="tag"><span class="term">Jack Druce</span> <span class="count">1</span></a></li>
<li><a href="#Justin Sproules" class="tag"><span class="term">Justin Sproules</span> <span class="count">1</span></a></li>
<li><a href="#Kate Dehnert" class="tag"><span class="term">Kate Dehnert</span> <span class="count">1</span></a></li>
<li><a href="#Lewis Dowell" class="tag"><span class="term">Lewis Dowell</span> <span class="count">1</span></a></li>
<li><a href="#Murphy McLachlan" class="tag"><span class="term">Murphy McLachlan</span> <span class="count">3</span></a></li>
<li><a href="#Perri Cassie" class="tag"><span class="term">Perri Cassie</span> <span class="count">1</span></a></li>
<li><a href="#Peter Jones" class="tag"><span class="term">Peter Jones</span> <span class="count">1</span></a></li>
<li><a href="#Rhi Down" class="tag"><span class="term">Rhi Down</span> <span class="count">1</span></a></li>
<li><a href="#Rose Callaghan" class="tag"><span class="term">Rose Callaghan</span> <span class="count">1</span></a></li>
<li><a href="#Tessa Ryan" class="tag"><span class="term">Tessa Ryan</span> <span class="count">2</span></a></li>
<li><a href="#Timothy Clark" class="tag"><span class="term">Timothy Clark</span> <span class="count">1</span></a></li>
<li><a href="#blog-posts" class="tag"><span class="term">blog-posts</span> <span class="count">2</span></a></li>
<li><a href="#episodes" class="tag"><span class="term">episodes</span> <span class="count">17</span></a></li>
<li><a href="#podcast updates" class="tag"><span class="term">podcast updates</span> <span class="count">5</span></a></li>
</ul>
<article>
<h2 id="Aaron Gocs" class="tag-heading">Aaron Gocs</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep17-Sports-Conversation-Assistant/" title="Ep 17 - Sports Conversation Assistant">Ep 17 - Sports Conversation Assistant</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Alex Ward" class="tag-heading">Alex Ward</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep9-House-Party-Finder/" title="Ep9 - House Party Finder">Ep9 - House Party Finder</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Blake Freeman" class="tag-heading">Blake Freeman</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep13-Lying-App/" title="Ep13 - Lying App w/ Blake Freeman">Ep13 - Lying App w/ Blake Freeman</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Daisy Berry" class="tag-heading">Daisy Berry</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep16-Pool-Sharing-App/" title="Ep 16 - Pool Sharing App">Ep 16 - Pool Sharing App</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Jack Druce" class="tag-heading">Jack Druce</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep14-Surveillance-Gifting-App/" title="Ep14 - Gifting Based Surveillance App w/ Jack Druce">Ep14 - Gifting Based Surveillance App w/ Jack Druce</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Justin Sproules" class="tag-heading">Justin Sproules</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep10-Questionable-Service-Dog-Prescriptions/" title="Ep10 - Questionable Service Dog Prescriptions w/ Justin Sproules">Ep10 - Questionable Service Dog Prescriptions w/ Justin Sproules</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Kate Dehnert" class="tag-heading">Kate Dehnert</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep2-The-Keb-App/" title="Ep2 - Keb-App: An App for Finding Great Kebabs w/ Kate Dehnert">Ep2 - Keb-App: An App for Finding Great Kebabs w/ Kate Dehnert</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Lewis Dowell" class="tag-heading">Lewis Dowell</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep11-Social-Media-Newspaper/" title="Ep11- Social Media Newspaper w/ Lewis Dowell">Ep11- Social Media Newspaper w/ Lewis Dowell</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Murphy McLachlan" class="tag-heading">Murphy McLachlan</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/Bonus-Ep-The-Dog-House/" title="Bonus Episode! Simon and Murphy visit a dog cafe">Bonus Episode! Simon and Murphy visit a dog cafe</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep4-Jims-Gym-for-Jims/" title="Ep4 - Jim's Gym for Jims: a Gym for Life Skills">Ep4 - Jim's Gym for Jims: a Gym for Life Skills</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep1-A-Bar-For-Dogs/" title="Ep1 - A Bar For Dogs w/ Murphy McLachlan">Ep1 - A Bar For Dogs w/ Murphy McLachlan</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Perri Cassie" class="tag-heading">Perri Cassie</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep7-Public-Toilet-Directory-App/" title="Ep7 - Public Toilet Directory App">Ep7 - Public Toilet Directory App</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Peter Jones" class="tag-heading">Peter Jones</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep8-Melbourne-Hipster-Tours/" title="Ep8 - Hipster Tours Melbourne">Ep8 - Hipster Tours Melbourne</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Rhi Down" class="tag-heading">Rhi Down</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep12-Pockets-For-Womens-Clothing/" title="Ep12 - Pockets for Women's Clothing w/ Rhi Down">Ep12 - Pockets for Women's Clothing w/ Rhi Down</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Rose Callaghan" class="tag-heading">Rose Callaghan</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep6-Wholesome-Dating-App/" title="Ep6 - Wholesome Dating App">Ep6 - Wholesome Dating App</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Tessa Ryan" class="tag-heading">Tessa Ryan</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep15-Uber-but-for/" title="Ep 15 - Uber but for.......">Ep 15 - Uber but for.......</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep3-Baby-Sitting-Team-Building/" title="Ep3 - The World's Most Profitable Baby Sitting / Team Building service">Ep3 - The World's Most Profitable Baby Sitting / Team Building service</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="Timothy Clark" class="tag-heading">Timothy Clark</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep5-Mr-Beanie/" title="Ep5 - Mr. Beanie: A Craft Play Center for Adults">Ep5 - Mr. Beanie: A Craft Play Center for Adults</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="blog-posts" class="tag-heading">blog-posts</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/blog-posts/You-Can-Now-Pay-Someone-To-Fix-Your-Hangover/" title="Like Uber but for hangovers">Like Uber but for hangovers</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/blog-posts/3-Times-This-Podcast-Predicted-The-Future/" title="3 Times This Podcast Has Already Predicted the God Damn Future">3 Times This Podcast Has Already Predicted the God Damn Future</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="episodes" class="tag-heading">episodes</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep17-Sports-Conversation-Assistant/" title="Ep 17 - Sports Conversation Assistant">Ep 17 - Sports Conversation Assistant</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep16-Pool-Sharing-App/" title="Ep 16 - Pool Sharing App">Ep 16 - Pool Sharing App</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep15-Uber-but-for/" title="Ep 15 - Uber but for.......">Ep 15 - Uber but for.......</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep14-Surveillance-Gifting-App/" title="Ep14 - Gifting Based Surveillance App w/ Jack Druce">Ep14 - Gifting Based Surveillance App w/ Jack Druce</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep13-Lying-App/" title="Ep13 - Lying App w/ Blake Freeman">Ep13 - Lying App w/ Blake Freeman</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep12-Pockets-For-Womens-Clothing/" title="Ep12 - Pockets for Women's Clothing w/ Rhi Down">Ep12 - Pockets for Women's Clothing w/ Rhi Down</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep11-Social-Media-Newspaper/" title="Ep11- Social Media Newspaper w/ Lewis Dowell">Ep11- Social Media Newspaper w/ Lewis Dowell</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep10-Questionable-Service-Dog-Prescriptions/" title="Ep10 - Questionable Service Dog Prescriptions w/ Justin Sproules">Ep10 - Questionable Service Dog Prescriptions w/ Justin Sproules</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep9-House-Party-Finder/" title="Ep9 - House Party Finder">Ep9 - House Party Finder</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep8-Melbourne-Hipster-Tours/" title="Ep8 - Hipster Tours Melbourne">Ep8 - Hipster Tours Melbourne</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep7-Public-Toilet-Directory-App/" title="Ep7 - Public Toilet Directory App">Ep7 - Public Toilet Directory App</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep6-Wholesome-Dating-App/" title="Ep6 - Wholesome Dating App">Ep6 - Wholesome Dating App</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep5-Mr-Beanie/" title="Ep5 - Mr. Beanie: A Craft Play Center for Adults">Ep5 - Mr. Beanie: A Craft Play Center for Adults</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep4-Jims-Gym-for-Jims/" title="Ep4 - Jim's Gym for Jims: a Gym for Life Skills">Ep4 - Jim's Gym for Jims: a Gym for Life Skills</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep3-Baby-Sitting-Team-Building/" title="Ep3 - The World's Most Profitable Baby Sitting / Team Building service">Ep3 - The World's Most Profitable Baby Sitting / Team Building service</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep2-The-Keb-App/" title="Ep2 - Keb-App: An App for Finding Great Kebabs w/ Kate Dehnert">Ep2 - Keb-App: An App for Finding Great Kebabs w/ Kate Dehnert</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/episodes/Ep1-A-Bar-For-Dogs/" title="Ep1 - A Bar For Dogs w/ Murphy McLachlan">Ep1 - A Bar For Dogs w/ Murphy McLachlan</a></li>
</ul>
</article><!-- /.hentry -->
<article>
<h2 id="podcast updates" class="tag-heading">podcast updates</h2>
<ul>
<li class="entry-title"><a href="http://www.letsmakebillions.com/podcast-launches-monday/" title="Launch date brought forward to Monday 8th of August">Launch date brought forward to Monday 8th of August</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/podcast-trailer/" title="Podcast Trailer is Up">Podcast Trailer is Up</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/Now-in-iTunes-Store/" title="Now Live in iTunes Store">Now Live in iTunes Store</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/Launch-Date/" title="Launches August 15">Launches August 15</a></li>
<li class="entry-title"><a href="http://www.letsmakebillions.com/new-podcast/" title="Welcome to The Podcast">Welcome to The Podcast</a></li>
</ul>
</article><!-- /.hentry -->
</div><!-- /#main -->
<div class="footer-wrapper">
<footer role="contentinfo">
<span>© 2016 Simon Cumming. Powered by <a href="http://jekyllrb.com" rel="nofollow">Jekyll</a> using the <a href="https://mademistakes.com/work/hpstr-jekyll-theme/" rel="nofollow">HPSTR Theme</a>.</span>
<a href="https://twitter.com/simonbcumming">Twitter</a>
</footer>
</div><!-- /.footer-wrapper -->
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
<script>window.jQuery || document.write('<script src="http://www.letsmakebillions.com/assets/js/vendor/jquery-1.9.1.min.js"><\/script>')</script>
<script src="http://www.letsmakebillions.com/assets/js/scripts.min.js"></script>
<!-- Asynchronous Google Analytics snippet -->
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-81044709-1', 'auto');
ga('require', 'linkid', 'linkid.js');
ga('send', 'pageview');
</script>
</body>
</html> | {'content_hash': 'ca5665437ab6cf9b1da950f2342159a7', 'timestamp': '', 'source': 'github', 'line_count': 470, 'max_line_length': 272, 'avg_line_length': 43.60212765957447, 'alnum_prop': 0.6511979700385497, 'repo_name': 'billionspod/billionspod.github.io', 'id': 'c988edccccd651524f4a23ef8f7fd8460fcf2284', 'size': '20493', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': '_site/tags/index.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '197657'}, {'name': 'HTML', 'bytes': '708670'}, {'name': 'JavaScript', 'bytes': '132886'}, {'name': 'Ruby', 'bytes': '4379'}]} |
#include <memory>
#include "flib.h"
#include "../../main/lib/flib/flib_vec2.h"
using namespace std;
#define SCREEN_H 320
#define SCREEN_W 480
// Forward declarations: these types are referenced here only via pointers,
// references, or smart-pointer parameters, so their full definitions are
// not required in this header.
struct TGameObject;
class TWorld;
class TCharacter;
// Camera over the game world: holds a view position and size, renders the
// scene, and manages an on-screen GUI sprite.
// NOTE(review): no include guard or #pragma once is visible at the top of
// this header, and it contains a file-scope `using namespace std;` — both
// should be confirmed/fixed at file level.
struct TCamera
{
TGfxSprite * GUISprite; // Sprite used for the GUI overlay; created by InitGUI(), refreshed by UpdateGUI().
TGfxVec2 Position; // Camera position (presumably in world coordinates — confirm in the .cpp).
int Height; // View height (presumably pixels; cf. SCREEN_H above — confirm).
int Width; // View width (presumably pixels; cf. SCREEN_W above — confirm).
shared_ptr<TWorld> World; // World this camera renders; shared ownership with other holders.
TCamera(); // Default-construct; initial view size set in the .cpp.
TCamera(int H, int W); // Construct with an explicit view height H and width W.
void Render(); // Draw the world as seen from this camera.
void Follow(shared_ptr<TGameObject> pGameObject); // Presumably keeps the camera tracking pGameObject — confirm.
void UpdateGUI(shared_ptr<TCharacter> pCharacter); // Refresh GUI contents from pCharacter's state.
void InitGUI(); // Create/initialise GUISprite before first use.
};
| {'content_hash': '891b5af5a8b7dba3bf05cacd5065ce00', 'timestamp': '', 'source': 'github', 'line_count': 34, 'max_line_length': 51, 'avg_line_length': 15.529411764705882, 'alnum_prop': 0.6723484848484849, 'repo_name': 'clietard/Borderline-Games', 'id': '99765729abb3f1d22594c7feb5165eba57e2e171', 'size': '542', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'ME731/camera.h', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '1175034'}, {'name': 'C#', 'bytes': '4557'}, {'name': 'C++', 'bytes': '34828'}, {'name': 'Objective-C', 'bytes': '5684'}]} |
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="description" content="Javadoc API documentation for TrAP API." />
<link rel="shortcut icon" type="image/x-icon" href="../../../../../../favicon.ico" />
<title>
ResizableMessageQueue
| TrAP API
</title>
<link href="../../../../../../../assets/doclava-developer-docs.css" rel="stylesheet" type="text/css" />
<link href="../../../../../../../assets/customizations.css" rel="stylesheet" type="text/css" />
<script src="../../../../../../../assets/search_autocomplete.js" type="text/javascript"></script>
<script src="../../../../../../../assets/jquery-resizable.min.js" type="text/javascript"></script>
<script src="../../../../../../../assets/doclava-developer-docs.js" type="text/javascript"></script>
<script src="../../../../../../../assets/prettify.js" type="text/javascript"></script>
<script type="text/javascript">
setToRoot("../../../../../../", "../../../../../../../assets/");
</script>
<script src="../../../../../../../assets/doclava-developer-reference.js" type="text/javascript"></script>
<script src="../../../../../../../assets/navtree_data.js" type="text/javascript"></script>
<script src="../../../../../../../assets/customizations.js" type="text/javascript"></script>
<noscript>
<style type="text/css">
html,body{overflow:auto;}
#body-content{position:relative; top:0;}
#doc-content{overflow:visible;border-left:3px solid #666;}
#side-nav{padding:0;}
#side-nav .toggle-list ul {display:block;}
#resize-packages-nav{border-bottom:3px solid #666;}
</style>
</noscript>
</head>
<body class="">
<div id="header">
<div id="headerLeft">
<span id="masthead-title">TrAP API</span>
</div>
<div id="headerRight">
<div id="search" >
<div id="searchForm">
<form accept-charset="utf-8" class="gsc-search-box"
onsubmit="return submit_search()">
<table class="gsc-search-box" cellpadding="0" cellspacing="0"><tbody>
<tr>
<td class="gsc-input">
<input id="search_autocomplete" class="gsc-input" type="text" size="33" autocomplete="off"
title="search developer docs" name="q"
value="search developer docs"
onFocus="search_focus_changed(this, true)"
onBlur="search_focus_changed(this, false)"
onkeydown="return search_changed(event, true, '../../../../../../')"
onkeyup="return search_changed(event, false, '../../../../../../')" />
<div id="search_filtered_div" class="no-display">
<table id="search_filtered" cellspacing=0>
</table>
</div>
</td>
<td class="gsc-search-button">
<input type="submit" value="Search" title="search" id="search-button" class="gsc-search-button" />
</td>
<td class="gsc-clear-button">
<div title="clear results" class="gsc-clear-button"> </div>
</td>
</tr></tbody>
</table>
</form>
</div><!-- searchForm -->
</div><!-- search -->
</div>
</div><!-- header -->
<div class="g-section g-tpl-240" id="body-content">
<div class="g-unit g-first side-nav-resizable" id="side-nav">
<div id="swapper">
<div id="nav-panels">
<div id="resize-packages-nav">
<div id="packages-nav">
<div id="index-links"><nobr>
<a href="../../../../../../packages.html" >Package Index</a> |
<a href="../../../../../../classes.html" >Class Index</a></nobr>
</div>
<ul>
<li class="api apilevel-">
<a href="../../../../../../com/ericsson/research/trap/package-summary.html">com.ericsson.research.trap</a></li>
<li class="api apilevel-">
<a href="../../../../../../com/ericsson/research/trap/auth/package-summary.html">com.ericsson.research.trap.auth</a></li>
<li class="api apilevel-">
<a href="../../../../../../com/ericsson/research/trap/delegates/package-summary.html">com.ericsson.research.trap.delegates</a></li>
<li class="api apilevel-">
<a href="../../../../../../com/ericsson/research/trap/examples/package-summary.html">com.ericsson.research.trap.examples</a></li>
<li class="api apilevel-">
<a href="../../../../../../com/ericsson/research/trap/spi/package-summary.html">com.ericsson.research.trap.spi</a></li>
<li class="selected api apilevel-">
<a href="../../../../../../com/ericsson/research/trap/spi/queues/package-summary.html">com.ericsson.research.trap.spi.queues</a></li>
<li class="api apilevel-">
<a href="../../../../../../com/ericsson/research/trap/utils/package-summary.html">com.ericsson.research.trap.utils</a></li>
</ul><br/>
</div> <!-- end packages -->
</div> <!-- end resize-packages -->
<div id="classes-nav">
<ul>
<li><h2>Interfaces</h2>
<ul>
<li class="api apilevel-"><a href="../../../../../../com/ericsson/research/trap/spi/queues/BlockingMessageQueue.html">BlockingMessageQueue</a></li>
<li class="api apilevel-"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html">MessageQueue</a></li>
<li class="selected api apilevel-"><a href="../../../../../../com/ericsson/research/trap/spi/queues/ResizableMessageQueue.html">ResizableMessageQueue</a></li>
</ul>
</li>
</ul><br/>
</div><!-- end classes -->
</div><!-- end nav-panels -->
<div id="nav-tree" style="display:none">
<div id="index-links"><nobr>
<a href="../../../../../../packages.html" >Package Index</a> |
<a href="../../../../../../classes.html" >Class Index</a></nobr>
</div>
</div><!-- end nav-tree -->
</div><!-- end swapper -->
</div> <!-- end side-nav -->
<script>
if (!isMobile) {
$("<a href='#' id='nav-swap' onclick='swapNav();return false;' style='font-size:10px;line-height:9px;margin-left:1em;text-decoration:none;'><span id='tree-link'>Use Tree Navigation</span><span id='panel-link' style='display:none'>Use Panel Navigation</span></a>").appendTo("#side-nav");
chooseDefaultNav();
if ($("#nav-tree").is(':visible')) {
init_default_navtree("../../../../../../");
} else {
addLoadEvent(function() {
scrollIntoView("packages-nav");
scrollIntoView("classes-nav");
});
}
$("#swapper").css({borderBottom:"2px solid #aaa"});
} else {
swapNav(); // tree view should be used on mobile
}
</script>
<div class="g-unit" id="doc-content">
<div id="api-info-block">
<div class="sum-details-links">
Summary:
<a href="#pubmethods">Methods</a>
| <a href="#inhmethods">Inherited Methods</a>
| <a href="#" onclick="return toggleAllClassInherited()" id="toggleAllClassInherited">[Expand All]</a>
</div><!-- end sum-details-links -->
<div class="api-level">
</div>
</div><!-- end api-info-block -->
<!-- ======== START OF CLASS DATA ======== -->
<div id="jd-header">
public
interface
<h1>ResizableMessageQueue</h1>
implements
<a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html">MessageQueue</a>
</div><!-- end header -->
<div id="naMessage"></div>
<div id="jd-content" class="api apilevel-">
<table class="jd-inheritance-table">
<tr>
<td colspan="1" class="jd-inheritance-class-cell">com.ericsson.research.trap.spi.queues.ResizableMessageQueue</td>
</tr>
</table>
<div class="jd-descr">
<h2>Class Overview</h2>
<p>For performance reasons, not all message queues can be resized. Any queue that can be resized must implement this
interface signifying such.</p>
</div><!-- jd-descr -->
<div class="jd-descr">
<h2>Summary</h2>
<!-- ========== METHOD SUMMARY =========== -->
<table id="pubmethods" class="jd-sumtable"><tr><th colspan="12">Public Methods</th></tr>
<tr class="alt-color api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
void</nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/ResizableMessageQueue.html#resize(long)">resize</a></span>(long newSize)</nobr>
<div class="jd-descrdiv">Resizes the queue to a new size.</div>
</td></tr>
</table>
<!-- ========== METHOD SUMMARY =========== -->
<table id="inhmethods" class="jd-sumtable"><tr><th>
<a href="#" class="toggle-all" onclick="return toggleAllInherited(this, null)">[Expand]</a>
<div style="clear:left;">Inherited Methods</div></th></tr>
<tr class="api apilevel-" >
<td colspan="12">
<a href="#" onclick="return toggleInherited(this, null)" id="inherited-methods-com.ericsson.research.trap.spi.queues.MessageQueue" class="jd-expando-trigger closed"
><img id="inherited-methods-com.ericsson.research.trap.spi.queues.MessageQueue-trigger"
src="../../../../../../../assets/images/triangle-closed.png"
class="jd-expando-trigger-img" /></a>
From interface
<a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html">com.ericsson.research.trap.spi.queues.MessageQueue</a>
<div id="inherited-methods-com.ericsson.research.trap.spi.queues.MessageQueue">
<div id="inherited-methods-com.ericsson.research.trap.spi.queues.MessageQueue-list"
class="jd-inheritedlinks">
</div>
<div id="inherited-methods-com.ericsson.research.trap.spi.queues.MessageQueue-summary" style="display: none;">
<table class="jd-sumtable-expando">
<tr class="alt-color api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
<a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html">MessageQueue</a></nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html#createNewQueue()">createNewQueue</a></span>()</nobr>
<div class="jd-descrdiv">Creates a new queue with the same settings as this queue, but none of the messages.</div>
</td></tr>
<tr class=" api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
<a href="http://download.oracle.com/javase/6/docs/api/index.html?java/lang/String.html">String</a></nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html#getQueueType()">getQueueType</a></span>()</nobr>
<div class="jd-descrdiv">Accessor method for the general message queue type provided by this queue, as defined in <code><a href="../../../../../../com/ericsson/research/trap/TrapEndpoint.html">TrapEndpoint</a></code>.</div>
</td></tr>
<tr class="alt-color api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
boolean</nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html#hasMoreThanOne()">hasMoreThanOne</a></span>()</nobr>
<div class="jd-descrdiv">Method to ask if the queue has more than one element in it</div>
</td></tr>
<tr class=" api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
int</nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html#length()">length</a></span>()</nobr>
<div class="jd-descrdiv">Current length of the queue.</div>
</td></tr>
<tr class="alt-color api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
<a href="../../../../../../com/ericsson/research/trap/spi/TrapMessage.html">TrapMessage</a></nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html#peek()">peek</a></span>()</nobr>
<div class="jd-descrdiv">Looks at the first message in the queue</div>
</td></tr>
<tr class=" api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
<a href="../../../../../../com/ericsson/research/trap/spi/TrapMessage.html">TrapMessage</a></nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html#pop()">pop</a></span>()</nobr>
<div class="jd-descrdiv">Removes the first message of the queue</div>
</td></tr>
<tr class="alt-color api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
void</nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html#put(com.ericsson.research.trap.spi.TrapMessage)">put</a></span>(<a href="../../../../../../com/ericsson/research/trap/spi/TrapMessage.html">TrapMessage</a> m)</nobr>
<div class="jd-descrdiv">Inserts a TrapMessage at the end of the queue</div>
</td></tr>
<tr class=" api apilevel-" >
<td class="jd-typecol"><nobr>
abstract
long</nobr>
</td>
<td class="jd-linkcol" width="100%"><nobr>
<span class="sympad"><a href="../../../../../../com/ericsson/research/trap/spi/queues/MessageQueue.html#size()">size</a></span>()</nobr>
<div class="jd-descrdiv">Maximum size of the queue</div>
</td></tr>
</table>
</div>
</div>
</td></tr>
</table>
</div><!-- jd-descr (summary) -->
<!-- Details -->
<!-- XML Attributes -->
<!-- Enum Values -->
<!-- Constants -->
<!-- Fields -->
<!-- Public ctors -->
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<!-- Protected ctors -->
<!-- ========= METHOD DETAIL ======== -->
<!-- Public methods -->
<h2>Public Methods</h2>
<A NAME="resize(long)"></A>
<div class="jd-details api apilevel-">
<h4 class="jd-details-title">
<span class="normal">
public
abstract
void
</span>
<span class="sympad">resize</span>
<span class="normal">(long newSize)</span>
</h4>
<div class="api-level">
<div>
</div>
</div>
<div class="jd-details-descr">
<div class="jd-tagdata jd-tagdescr"><p>Resizes the queue to a new size. If the new size is smaller than the old size, the put() command may fail or
block. The queue will not discard any old messages.
<p>
For example, resizing a queue with 20 items to size 10 will not remove any of the existing messages. Insertions
will fail, however, until there are 9 or fewer items in the queue.</p></div>
<div class="jd-tagdata">
<h5 class="jd-tagtitle">Parameters</h5>
<table class="jd-tagtable">
<tr>
<th>newSize</th>
<td>The new queue size, in number of items or bytes (depending on the queue type)
</td>
</tr>
</table>
</div>
</div>
</div>
<!-- ========= METHOD DETAIL ======== -->
<!-- ========= END OF CLASS DATA ========= -->
<A NAME="navbar_top"></A>
<div id="footer">
Generated by <a href="http://code.google.com/p/doclava/">Doclava</a>.
</div> <!-- end footer -->
</div> <!-- jd-content -->
</div><!-- end doc-content -->
</div> <!-- end body-content -->
<script type="text/javascript">
init(); /* initialize doclava-developer-docs.js */
</script>
</body>
</html> | {'content_hash': 'c27016d26b9edac66883c23e026ccd23', 'timestamp': '', 'source': 'github', 'line_count': 682, 'max_line_length': 294, 'avg_line_length': 24.957478005865102, 'alnum_prop': 0.5345749368427237, 'repo_name': 'princeofdarkness76/trap', 'id': '50a5d77009e69eeff3dd750fa20026c65158bb1c', 'size': '17021', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'trap-api/apidocs/reference/com/ericsson/research/trap/spi/queues/ResizableMessageQueue.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'CSS', 'bytes': '203809'}, {'name': 'HTML', 'bytes': '40284461'}, {'name': 'Java', 'bytes': '2358762'}, {'name': 'JavaScript', 'bytes': '390348'}, {'name': 'Shell', 'bytes': '7473'}]} |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.ImageUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.JBUI.ScaleContext;
import com.intellij.util.ui.UIUtil;
import org.imgscalr.Scalr;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.awt.image.BufferedImageOp;
import java.awt.image.ImageFilter;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentMap;
import static com.intellij.util.ImageLoader.ImageDesc.Type.IMG;
import static com.intellij.util.ImageLoader.ImageDesc.Type.SVG;
import static com.intellij.util.ui.JBUI.ScaleType.PIX_SCALE;
public class ImageLoader implements Serializable {
  private static final Logger LOG = Logger.getInstance("#com.intellij.util.ImageLoader");

  /** Upper bound, in bytes (estimated at 4 bytes per pixel), for images eligible for {@link #ourCache}. */
  public static final long CACHED_IMAGE_MAX_SIZE = (long)(Registry.doubleValue("ide.cached.image.max.size") * 1024 * 1024);

  // Soft-value map: cached images can be reclaimed by the GC under memory pressure.
  private static final ConcurrentMap<String, Image> ourCache = ContainerUtil.createConcurrentSoftValueMap();

  /** Drops all cached images. */
  public static void clearCache() {
    ourCache.clear();
  }

  @SuppressWarnings("UnusedDeclaration") // set from com.intellij.internal.IconsLoadTime
  private static LoadFunction measureLoad;

  /**
   * For internal usage. Allows wrapping the actual load call, e.g. to measure icon load time.
   */
  public interface LoadFunction {
    Image load(@Nullable LoadFunction delegate, @NotNull ImageDesc.Type type) throws IOException;
  }

  /** Describes a single image resource: path, origin class (if classpath-based), scale, and raster/vector type. */
  public static class ImageDesc {
    public enum Type {IMG, SVG}

    final String path;
    final Class cls; // resource class if present
    final double scale; // initial scale factor
    final Type type;
    final boolean original; // path is not altered

    ImageDesc(@NotNull String path, @Nullable Class cls, double scale, @NotNull Type type) {
      this(path, cls, scale, type, false);
    }

    ImageDesc(@NotNull String path, @Nullable Class cls, double scale, @NotNull Type type, boolean original) {
      this.path = path;
      this.cls = cls;
      this.scale = scale;
      this.type = type;
      this.original = original;
    }

    /** Loads the image, using the in-memory cache for URL-based (non-classpath) resources. */
    @Nullable
    public Image load() throws IOException {
      return load(true);
    }

    /**
     * Loads the image described by this descriptor.
     *
     * @param useCache whether URL-based images may be read from / stored into {@link #ourCache};
     *                 classpath resources (cls != null) are never cached here
     * @return the loaded image, or {@code null} if the resource cannot be found
     */
    @Nullable
    public Image load(boolean useCache) throws IOException {
      InputStream stream = null;
      if (cls != null) {
        //noinspection IOResourceOpenedButNotSafelyClosed
        stream = cls.getResourceAsStream(path);
        if (stream == null) return null;
      }
      String cacheKey = null;
      URL url = null;
      if (stream == null) {
        if (useCache) {
          // SVG rasterization depends on the scale, so the scale is part of the cache key.
          cacheKey = path + (type == SVG ? "_@" + scale + "x" : "");
          Image image = ourCache.get(cacheKey);
          if (image != null) return image;
        }
        url = new URL(path);
        URLConnection connection = url.openConnection();
        if (connection instanceof HttpURLConnection) {
          // Only the original (unaltered) path is fetched over HTTP; derived
          // variants (@2x/_dark) are not probed remotely.
          if (!original) return null;
          connection.addRequestProperty("User-Agent", "IntelliJ");
        }
        stream = connection.getInputStream();
      }
      Image image = loadImpl(url, stream, scale);
      // Cache only reasonably small images (estimate: 4 bytes per pixel).
      if (image != null && cacheKey != null &&
          4L * image.getWidth(null) * image.getHeight(null) <= CACHED_IMAGE_MAX_SIZE)
      {
        ourCache.put(cacheKey, image);
      }
      return image;
    }

    Image loadImpl(final URL url, final InputStream stream, final double scale) throws IOException {
      LoadFunction f = new LoadFunction() {
        @Override
        public Image load(@Nullable LoadFunction delegate, @NotNull Type type) throws IOException {
          switch (type) {
            case SVG:
              return SVGLoader.load(url, stream, ImageDesc.this.scale);
            case IMG:
              return ImageLoader.load(stream, scale);
          }
          return null;
        }
      };
      // When instrumentation is installed (IconsLoadTime), route the load through it.
      if (measureLoad != null) {
        return measureLoad.load(f, type);
      }
      return f.load(null, type);
    }

    @Override
    public String toString() {
      return path + ", scale: " + scale + ", type: " + type;
    }
  }

  /** An ordered list of candidate image descriptors; the first one that loads successfully wins. */
  private static class ImageDescList extends ArrayList<ImageDesc> {
    private ImageDescList() {}

    /** Builds the candidate list for a given name/extension, honoring retina/dark/SVG variants. */
    static class Builder {
      final ImageDescList list = new ImageDescList();
      final String name;
      final String ext;
      final Class cls;
      final boolean svg;
      final double scale;

      Builder(String name, String ext, Class cls, boolean svg, double scale) {
        this.name = name;
        this.ext = ext;
        this.cls = cls;
        this.svg = svg;
        this.scale = scale;
      }

      void add(boolean retina, boolean dark) {
        // SVG, when enabled, is tried first; the raster variant is always a fallback.
        if (svg) add(retina, dark, SVG);
        add(retina, dark, IMG);
      }

      void add(boolean retina, boolean dark, ImageDesc.Type type) {
        String _ext = SVG == type ? "svg" : ext;
        double _scale = SVG == type ? scale : retina ? 2 : 1;
        // Preferred naming convention: name[_dark][@2x].ext
        list.add(new ImageDesc(name + (dark ? "_dark" : "") + (retina ? "@2x" : "") + "." + _ext, cls, _scale, type));
        if (retina && dark) {
          // Alternative naming convention: name@2x_dark.ext
          list.add(new ImageDesc(name + "@2x_dark" + "." + _ext, cls, _scale, type));
        }
        if (retina) {
          // a fallback to 1x icon
          list.add(new ImageDesc(name + (dark ? "_dark" : "") + "." + _ext, cls, SVG == type ? scale : 1, type));
        }
      }

      void add(ImageDesc.Type type) {
        // Original path, no variant rewriting (used for remote URLs).
        list.add(new ImageDesc(name + "." + ext, cls, 1.0, type, true));
      }

      ImageDescList build() {
        return list;
      }
    }

    @Nullable
    public Image load() {
      return load(ImageConverterChain.create());
    }

    @Nullable
    public Image load(@NotNull ImageConverterChain converters) {
      return load(converters, true);
    }

    /** Tries each descriptor in order; returns the first successfully loaded (and converted) image. */
    @Nullable
    public Image load(@NotNull ImageConverterChain converters, boolean useCache) {
      for (ImageDesc desc : this) {
        try {
          Image image = desc.load(useCache);
          if (image == null) continue;
          LOG.debug("Loaded image: " + desc);
          return converters.convert(image, desc);
        }
        catch (IOException ignore) {
          // this candidate failed; try the next one
        }
      }
      return null;
    }

    public static ImageDescList create(@NotNull String path,
                                       @Nullable Class cls,
                                       boolean dark,
                                       boolean allowFloatScaling,
                                       ScaleContext ctx)
    {
      // Prefer retina images for HiDPI scale, because downscaling
      // retina images provides a better result than up-scaling non-retina images.
      boolean retina = JBUI.isHiDPI(ctx.getScale(PIX_SCALE));
      Builder list = new Builder(FileUtil.getNameWithoutExtension(path),
                                 FileUtilRt.getExtension(path),
                                 cls,
                                 Registry.is("ide.svg.icon"),
                                 adjustScaleFactor(allowFloatScaling, ctx.getScale(PIX_SCALE)));

      if (path.contains("://") && !path.startsWith("file:")) {
        // Remote resource: take the path as-is, no retina/dark variants.
        list.add(StringUtil.endsWithIgnoreCase(path, ".svg") ? SVG : IMG);
      }
      else if (retina && dark) {
        list.add(true, true);
        list.add(true, false); // fallback to non-dark
      }
      else if (dark) {
        list.add(false, true);
        list.add(false, false); // fallback to non-dark
      }
      else if (retina) {
        list.add(true, false);
      }
      else {
        list.add(false, false);
      }
      return list.build();
    }
  }

  /** A post-load image transformation step. */
  private interface ImageConverter {
    Image convert(@Nullable Image source, ImageDesc desc);
  }

  /** A chain of {@link ImageConverter}s applied in insertion order. */
  private static class ImageConverterChain extends ArrayList<ImageConverter> {
    private ImageConverterChain() {}

    public static ImageConverterChain create() {
      return new ImageConverterChain();
    }

    ImageConverterChain withFilter(final ImageFilter[] filters) {
      if (filters == null) return this;
      ImageConverterChain chain = this;
      for (ImageFilter filter : filters) {
        chain = chain.withFilter(filter);
      }
      return chain;
    }

    ImageConverterChain withFilter(final ImageFilter filter) {
      if (filter == null) return this;
      return with(new ImageConverter() {
        @Override
        public Image convert(Image source, ImageDesc desc) {
          return ImageUtil.filter(source, filter);
        }
      });
    }

    /** Wraps the resulting image into a HiDPI-aware image for the given scale context. */
    ImageConverterChain withHiDPI(final ScaleContext ctx) {
      if (ctx == null) return this;
      return with(new ImageConverter() {
        @Override
        public Image convert(Image source, ImageDesc desc) {
          return ImageUtil.ensureHiDPI(source, ctx);
        }
      });
    }

    public ImageConverterChain with(ImageConverter f) {
      add(f);
      return this;
    }

    public Image convert(Image image, ImageDesc desc) {
      for (ImageConverter f : this) {
        image = f.convert(image, desc);
      }
      return image;
    }
  }

  // Dummy component used as the MediaTracker host for synchronous image loading.
  public static final Component ourComponent = new Component() {
  };

  /**
   * Blocks (up to 5 seconds) until the toolkit image is fully loaded.
   *
   * @return {@code true} if the image loaded without errors
   */
  private static boolean waitForImage(Image image) {
    if (image == null) return false;
    if (image.getWidth(null) > 0) return true;
    MediaTracker mediatracker = new MediaTracker(ourComponent);
    mediatracker.addImage(image, 1);
    try {
      mediatracker.waitForID(1, 5000);
    }
    catch (InterruptedException ex) {
      LOG.info(ex);
      // Restore the interrupt flag so callers up the stack can observe the interruption.
      Thread.currentThread().interrupt();
    }
    return !mediatracker.isErrorID(1);
  }

  @Nullable
  public static Image loadFromUrl(@NotNull URL url) {
    return loadFromUrl(url, true);
  }

  @Nullable
  public static Image loadFromUrl(@NotNull URL url, boolean allowFloatScaling) {
    return loadFromUrl(url, allowFloatScaling, true, new ImageFilter[] {null}, ScaleContext.create());
  }

  /**
   * @see #loadFromUrl(URL, boolean, boolean, boolean, ImageFilter[], ScaleContext)
   */
  @Nullable
  public static Image loadFromUrl(@NotNull URL url, final boolean allowFloatScaling, boolean useCache, ImageFilter[] filters, final ScaleContext ctx) {
    return loadFromUrl(url, allowFloatScaling, useCache, UIUtil.isUnderDarcula(), filters, ctx);
  }

  /**
   * Loads an image of available resolution (1x, 2x, ...) and scales to address the provided scale context.
   * Then wraps the image with {@link JBHiDPIScaledImage} if necessary.
   */
  @Nullable
  public static Image loadFromUrl(@NotNull URL url, final boolean allowFloatScaling, boolean useCache, boolean dark, ImageFilter[] filters, final ScaleContext ctx) {
    // We can't check all 3rd party plugins and convince the authors to add @2x icons.
    // In IDE-managed HiDPI mode with scale > 1.0 we scale images manually.
    return ImageDescList.create(url.toString(), null, dark, allowFloatScaling, ctx).load(
      ImageConverterChain.create().
        withFilter(filters).
        with(new ImageConverter() {
          @Override
          public Image convert(Image source, ImageDesc desc) {
            // SVG is rasterized at the target scale already; only raster images need scaling here.
            if (source != null && desc.type != SVG) {
              double scale = adjustScaleFactor(allowFloatScaling, ctx.getScale(PIX_SCALE));
              if (desc.scale > 1) scale /= desc.scale; // compensate the image original scale
              source = scaleImage(source, scale);
            }
            return source;
          }
        }).
        withHiDPI(ctx),
      useCache);
  }

  /** Rounds the scale to 1x/2x unless fractional (float) scaling is allowed. */
  private static double adjustScaleFactor(boolean allowFloatScaling, double scale) {
    return allowFloatScaling ? scale : JBUI.isHiDPI(scale) ? 2f : 1f;
  }

  @NotNull
  public static Image scaleImage(Image image, double scale) {
    if (scale == 1.0) return image;

    if (image instanceof JBHiDPIScaledImage) {
      return ((JBHiDPIScaledImage)image).scale(scale);
    }
    int w = image.getWidth(null);
    int h = image.getHeight(null);
    if (w <= 0 || h <= 0) {
      return image;
    }
    int width = (int)Math.round(scale * w);
    int height = (int)Math.round(scale * h);
    // Using "QUALITY" instead of "ULTRA_QUALITY" results in images that are less blurry
    // because ultra quality performs a few more passes when scaling, which introduces blurriness
    // when the scaling factor is relatively small (i.e. <= 3.0f) -- which is the case here.
    return Scalr.resize(ImageUtil.toBufferedImage(image), Scalr.Method.QUALITY, Scalr.Mode.FIT_EXACT, width, height, (BufferedImageOp[])null);
  }

  @NotNull
  public static Image scaleImage(@NotNull Image image, int targetSize) {
    return scaleImage(image, targetSize, targetSize);
  }

  @NotNull
  public static Image scaleImage(@NotNull Image image, int targetWidth, int targetHeight) {
    if (image instanceof JBHiDPIScaledImage) {
      return ((JBHiDPIScaledImage)image).scale(targetWidth, targetHeight);
    }
    int w = image.getWidth(null);
    int h = image.getHeight(null);

    if (w <= 0 || h <= 0 || w == targetWidth && h == targetHeight) {
      return image;
    }

    return Scalr.resize(ImageUtil.toBufferedImage(image), Scalr.Method.QUALITY, Scalr.Mode.FIT_EXACT,
                        targetWidth, targetHeight,
                        (BufferedImageOp[])null);
  }

  /** Loads an image from a classpath resource, resolving the caller's class via reflection. */
  @Nullable
  public static Image loadFromResource(@NonNls @NotNull String s) {
    Class callerClass = ReflectionUtil.getGrandCallerClass();
    if (callerClass == null) return null;
    return loadFromResource(s, callerClass);
  }

  @Nullable
  public static Image loadFromResource(@NonNls @NotNull String path, @NotNull Class aClass) {
    ScaleContext ctx = ScaleContext.create();
    return ImageDescList.create(path, aClass, UIUtil.isUnderDarcula(), true, ctx).
      load(ImageConverterChain.create().withHiDPI(ctx));
  }

  public static Image loadFromBytes(@NotNull final byte[] bytes) {
    return loadFromStream(new ByteArrayInputStream(bytes));
  }

  public static Image loadFromStream(@NotNull final InputStream inputStream) {
    return loadFromStream(inputStream, 1);
  }

  public static Image loadFromStream(@NotNull final InputStream inputStream, final int scale) {
    return loadFromStream(inputStream, scale, null);
  }

  public static Image loadFromStream(@NotNull final InputStream inputStream, final int scale, ImageFilter filter) {
    Image image = load(inputStream, scale);
    ImageDesc desc = new ImageDesc("", null, scale, IMG);
    return ImageConverterChain.create().withFilter(filter).withHiDPI(ScaleContext.create()).convert(image, desc);
  }

  /**
   * Loads a user-provided icon from a file and downscales it to fit 18x18 if needed.
   *
   * @return the icon image, or {@code null} if it cannot be loaded or reports a non-positive size
   */
  public static @Nullable Image loadCustomIcon(@NotNull File f) throws IOException {
    final Image icon = _loadImageFromFile(f);
    if (icon == null)
      return null;

    final int w = icon.getWidth(null);
    final int h = icon.getHeight(null);

    if (w <= 0 || h <= 0) {
      LOG.error("negative image size: w=" + w + ", h=" + h + ", path=" + f.getPath());
      return null;
    }

    if (w > EmptyIcon.ICON_18.getIconWidth() || h > EmptyIcon.ICON_18.getIconHeight()) {
      // Scale down preserving aspect ratio so the longer side fits 18px.
      final double s = EmptyIcon.ICON_18.getIconWidth()/(double)Math.max(w, h);
      return scaleImage(icon, s);
    }

    return icon;
  }

  private static @Nullable Image _loadImageFromFile(@NotNull File f) throws IOException {
    final ScaleContext ctx = ScaleContext.create();
    final double scale = ctx.getScale(PIX_SCALE); // probably, need implement naming conventions: filename ends with @2x => HiDPI (scale=2)
    final ImageDesc desc = new ImageDesc(f.toURI().toURL().toString(), null, scale, StringUtil.endsWithIgnoreCase(f.getPath(), ".svg") ? SVG : IMG);
    return ImageUtil.ensureHiDPI(desc.load(), ctx);
  }

  /**
   * Reads the stream fully (always closing it) and decodes the bytes via the AWT toolkit.
   *
   * @param scale must be positive; fractional values (e.g. 1.5) are accepted. The value itself
   *              is not applied here — decoding uses the raw bytes.
   * @return the decoded image, or {@code null} on failure (the error is logged)
   */
  private static Image load(@NotNull final InputStream inputStream, double scale) {
    // The check accepts any positive (including fractional) scale; the previous message
    // ("Scale must be 1 or greater") contradicted the actual condition.
    if (scale <= 0) throw new IllegalArgumentException("Scale must be positive");
    try {
      BufferExposingByteArrayOutputStream outputStream = new BufferExposingByteArrayOutputStream();
      try {
        byte[] buffer = new byte[1024];
        while (true) {
          final int n = inputStream.read(buffer);
          if (n < 0) break;
          outputStream.write(buffer, 0, n);
        }
      }
      finally {
        inputStream.close();
      }
      Image image = Toolkit.getDefaultToolkit().createImage(outputStream.getInternalBuffer(), 0, outputStream.size());
      // Toolkit.createImage is asynchronous; block until decoding completes.
      waitForImage(image);
      return image;
    }
    catch (Exception ex) {
      LOG.error(ex);
    }
    return null;
  }
}
| {'content_hash': 'c87bfc552026007495e7a163b10fc6f6', 'timestamp': '', 'source': 'github', 'line_count': 499, 'max_line_length': 165, 'avg_line_length': 34.06813627254509, 'alnum_prop': 0.6401176470588236, 'repo_name': 'msebire/intellij-community', 'id': '044f6ca18ad1cd5d5ccd2ad1509b7e703f6b2b5a', 'size': '17000', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'platform/util/src/com/intellij/util/ImageLoader.java', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
if (typeof mapbox == 'undefined') mapbox = {};
if (typeof mapbox.markers == 'undefined') mapbox.markers = {};
// Factory for a markers layer: a Modest Maps-compatible layer object that
// renders GeoJSON point features as absolutely-positioned DOM elements
// stacked over the map. Returned object `m` is both the layer and its API.
mapbox.markers.layer = function() {
    var m = {},
        // external list of geojson features
        features = [],
        // internal list of markers
        markers = [],
        // internal list of callbacks
        callbackManager = new MM.CallbackManager(m, ['drawn', 'markeradded']),
        // the absolute position of the parent element
        position = null,
        // a factory function for creating DOM elements out of
        // GeoJSON objects
        factory = mapbox.markers.simplestyle_factory,
        // a sorter function for sorting GeoJSON objects
        // in the DOM
        sorter = function(a, b) {
            return b.geometry.coordinates[1] -
              a.geometry.coordinates[1];
        },
        // a list of urls from which features can be loaded.
        // these can be templated with {z}, {x}, and {y}
        urls,
        // map bounds
        left = null,
        right = null,
        // a function that filters points
        filter = function() {
            return true;
        },
        // monotonically-increasing sequence backing the default key function
        _seq = 0,
        // default key function: every call yields a fresh id, so with no
        // custom key (see m.key) features are always rebuilt, never diffed
        keyfn = function() {
            return ++_seq;
        },
        // marker lookup table keyed by keyfn(feature)
        index = {};

    // The parent DOM element
    m.parent = document.createElement('div');
    m.parent.style.cssText = 'position: absolute; top: 0px;' +
        'left:0px; width:100%; height:100%; margin:0; padding:0; z-index:0;pointer-events:none;';
    m.name = 'markers';

    // reposition a single marker element
    function reposition(marker) {
        // remember the tile coordinate so we don't have to reproject every time
        if (!marker.coord) marker.coord = m.map.locationCoordinate(marker.location);
        var pos = m.map.coordinatePoint(marker.coord);
        var pos_loc, new_pos;
        // If this point has wound around the world, adjust its position
        // to the new, onscreen location
        if (pos.x < 0) {
            // off-screen to the left: shift east by whole world-widths (360°)
            pos_loc = new MM.Location(marker.location.lat, marker.location.lon);
            pos_loc.lon += Math.ceil((left.lon - marker.location.lon) / 360) * 360;
            new_pos = m.map.locationPoint(pos_loc);
            if (new_pos.x < m.map.dimensions.x) {
                pos = new_pos;
                marker.coord = m.map.locationCoordinate(pos_loc);
            }
        } else if (pos.x > m.map.dimensions.x) {
            // off-screen to the right: shift west by whole world-widths (360°)
            pos_loc = new MM.Location(marker.location.lat, marker.location.lon);
            pos_loc.lon -= Math.ceil((marker.location.lon - right.lon) / 360) * 360;
            new_pos = m.map.locationPoint(pos_loc);
            if (new_pos.x > 0) {
                pos = new_pos;
                marker.coord = m.map.locationCoordinate(pos_loc);
            }
        }
        pos.scale = 1;
        pos.width = pos.height = 0;
        MM.moveElement(marker.element, pos);
    }

    // Adding and removing callbacks is mainly a way to enable mmg_interaction to operate.
    // I think there are better ways to do this, by, for instance, having mmg be able to
    // register 'binders' to markers, but this is backwards-compatible and equivalent
    // externally.
    m.addCallback = function(event, callback) {
        callbackManager.addCallback(event, callback);
        return m;
    };

    m.removeCallback = function(event, callback) {
        callbackManager.removeCallback(event, callback);
        return m;
    };

    // Draw this layer - reposition all markers on the div. This requires
    // the markers library to be attached to a map, and will noop otherwise.
    m.draw = function() {
        if (!m.map) return;
        left = m.map.pointLocation(new MM.Point(0, 0));
        right = m.map.pointLocation(new MM.Point(m.map.dimensions.x, 0));
        callbackManager.dispatchCallback('drawn', m);
        for (var i = 0; i < markers.length; i++) {
            reposition(markers[i]);
        }
    };

    // Add a fully-formed marker to the layer. This fires a `markeradded` event.
    // This does not require the map element to be attached.
    m.add = function(marker) {
        if (!marker || !marker.element) return null;
        m.parent.appendChild(marker.element);
        markers.push(marker);
        callbackManager.dispatchCallback('markeradded', marker);
        return marker;
    };

    // Remove a fully-formed marker - which must be the same exact marker
    // object as in the markers array - from the layer.
    m.remove = function(marker) {
        if (!marker) return null;
        m.parent.removeChild(marker.element);
        for (var i = 0; i < markers.length; i++) {
            if (markers[i] === marker) {
                markers.splice(i, 1);
                return marker;
            }
        }
        return marker;
    };

    // Read-only accessor for the internal marker objects. There is
    // intentionally no setter: markers are managed through m.features().
    m.markers = function(x) {
        if (!arguments.length) return markers;
    };

    // Add a GeoJSON feature to the markers layer.
    m.add_feature = function(x) {
        return m.features(m.features().concat([x]));
    };

    // Get or set the DOM sort comparator applied to features.
    m.sort = function(x) {
        if (!arguments.length) return sorter;
        sorter = x;
        return m;
    };

    // Public data interface
    m.features = function(x) {
        // Return features
        if (!arguments.length) return features;
        // Set features
        if (!x) x = [];
        features = x.slice();
        features.sort(sorter);

        // Mark all current markers untouched; any marker still untouched
        // after the loop below belongs to a removed feature and is pruned.
        for (var j = 0; j < markers.length; j++) {
            markers[j].touch = false;
        }

        for (var i = 0; i < features.length; i++) {
            if (filter(features[i])) {
                var id = keyfn(features[i]);
                if (index[id]) {
                    // marker is already on the map, needs to be moved or rebuilt
                    index[id].location = new MM.Location(
                        features[i].geometry.coordinates[1],
                        features[i].geometry.coordinates[0]);
                    index[id].coord = null;
                    reposition(index[id]);
                } else {
                    // marker needs to be added to the map
                    index[id] = m.add({
                        element: factory(features[i]),
                        location: new MM.Location(
                            features[i].geometry.coordinates[1],
                            features[i].geometry.coordinates[0]),
                        data: features[i]
                    });
                }
                if (index[id]) index[id].touch = true;
            }
        }

        // Iterate backwards so splicing in m.remove() doesn't skip entries.
        for (var k = markers.length - 1; k >= 0; k--) {
            if (markers[k].touch === false) {
                m.remove(markers[k]);
            }
        }

        if (m.map && m.map.coordinate) m.map.draw();

        return m;
    };

    // Request features from a URL - either a local URL or a JSONP call.
    // Expects GeoJSON-formatted features.
    m.url = function(x, callback) {
        if (!arguments.length) return urls;
        if (typeof reqwest === 'undefined') throw 'reqwest is required for url loading';
        if (typeof x === 'string') x = [x];
        urls = x;
        function add_features(err, x) {
            if (err && callback) return callback(err);
            var features = typeof x !== 'undefined' && x.features ? x.features : null;
            if (features) m.features(features);
            if (callback) callback(err, features, m);
        }
        // URLs ending in `geojsonp` are fetched via JSONP; everything else as plain JSON.
        reqwest((urls[0].match(/geojsonp$/)) ? {
            url: urls[0] + (~urls[0].indexOf('?') ? '&' : '?') + 'callback=?',
            type: 'jsonp',
            success: function(resp) { add_features(null, resp); },
            error: add_features
        } : {
            url: urls[0],
            type: 'json',
            success: function(resp) { add_features(null, resp); },
            error: add_features
        });
        return m;
    };

    // Convenience: load markers for a Mapbox map id.
    m.id = function(x, callback) {
        return m.url('http://a.tiles.mapbox.com/v3/' + x + '/markers.geojsonp', callback);
    };

    // Load features from a CSV string (converted to GeoJSON first).
    m.csv = function(x) {
        return m.features(mapbox.markers.csv_to_geojson(x));
    };

    // Compute the [southwest, northeast] bounding box of current features.
    m.extent = function() {
        var ext = [{
            lat: Infinity,
            lon: Infinity
        }, {
            lat: -Infinity,
            lon: -Infinity
        }];
        var ft = m.features();
        for (var i = 0; i < ft.length; i++) {
            var coords = ft[i].geometry.coordinates;
            if (coords[0] < ext[0].lon) ext[0].lon = coords[0];
            if (coords[1] < ext[0].lat) ext[0].lat = coords[1];
            if (coords[0] > ext[1].lon) ext[1].lon = coords[0];
            if (coords[1] > ext[1].lat) ext[1].lat = coords[1];
        }
        return ext;
    };

    // Get or set the feature key function used to match features to existing
    // markers across m.features() calls; passing null restores the default.
    m.key = function(x) {
        if (!arguments.length) return keyfn;
        if (x === null) {
            keyfn = function() { return ++_seq; };
        } else {
            keyfn = x;
        }
        return m;
    };

    // Factory interface
    m.factory = function(x) {
        if (!arguments.length) return factory;
        factory = x;
        // re-render all features
        m.features(m.features());
        return m;
    };

    m.filter = function(x) {
        if (!arguments.length) return filter;
        filter = x;
        // Setting a filter re-sets the features into a new array.
        // This does _not_ change the actual output of .features()
        m.features(m.features());
        return m;
    };

    // Detach the layer's root element from the DOM.
    m.destroy = function() {
        if (m.parent.parentNode) {
            m.parent.parentNode.removeChild(m.parent);
        }
    };

    // Get or set this layer's name
    m.named = function(x) {
        if (!arguments.length) return m.name;
        m.name = x;
        return m;
    };

    m.enabled = true;

    m.enable = function() {
        this.enabled = true;
        this.parent.style.display = '';
        return m;
    };

    m.disable = function() {
        this.enabled = false;
        this.parent.style.display = 'none';
        return m;
    };

    return m;
};
// Legacy alias: intentionally assigned without `var` so `mmg` lands on the
// global object, preserving the pre-namespace API name.
mmg = mapbox.markers.layer; // Backwards compatibility
mapbox.markers.interaction = function(mmg) {
    // Attach hover/click tooltip behavior to a markers layer. The
    // interaction object is a singleton per layer, so calling this again
    // with the same layer returns the existing instance.
    if (mmg && mmg.interaction) return mmg.interaction;
    var mi = {},
        tooltips = [],
        exclusive = true,
        hideOnMove = true,
        showOnHover = true,
        close_timer = null,
        on = true,
        formatter;
    // Get or set the function that renders a feature into tooltip content.
    mi.formatter = function(x) {
        if (!arguments.length) return formatter;
        formatter = x;
        return mi;
    };
    // Default formatter: title + description divs, passed through
    // html_sanitize when the sanitizer is present on the page.
    mi.formatter(function(feature) {
        var o = '',
            props = feature.properties;
        // Tolerate markers without properties at all.
        if (!props) return null;
        if (props.title) {
            o += '<div class="marker-title">' + props.title + '</div>';
        }
        if (props.description) {
            o += '<div class="marker-description">' + props.description + '</div>';
        }
        // BUGFIX: `typeof` always yields a string, so the original
        // comparison against the *value* undefined was always true, and
        // pages without the sanitizer threw a ReferenceError when calling
        // html_sanitize. Compare against the string 'undefined' instead.
        if (typeof html_sanitize !== 'undefined') {
            o = html_sanitize(o,
                function(url) {
                    if (/^(https?:\/\/|data:image)/.test(url)) return url;
                },
                function(x) { return x; });
        }
        return o;
    });
    // Get or set whether tooltips are hidden when the map is panned.
    mi.hideOnMove = function(x) {
        if (!arguments.length) return hideOnMove;
        hideOnMove = x;
        return mi;
    };
    // Get or set whether opening one tooltip closes all others.
    mi.exclusive = function(x) {
        if (!arguments.length) return exclusive;
        exclusive = x;
        return mi;
    };
    // Get or set whether tooltips open on mouseover (vs click only).
    mi.showOnHover = function(x) {
        if (!arguments.length) return showOnHover;
        showOnHover = x;
        return mi;
    };
    // Remove all open tooltips and clear the markers' clicked state.
    mi.hideTooltips = function() {
        while (tooltips.length) mmg.remove(tooltips.pop());
        // BUGFIX: `markers` is only assigned when a markers layer was
        // provided (the var is hoisted out of the `if (mmg)` block below);
        // guard so a detached interaction doesn't throw here.
        if (markers) {
            for (var i = 0; i < markers.length; i++) {
                delete markers[i].clicked;
            }
        }
    };
    // Enable interaction: tooltips will open again.
    mi.add = function() {
        on = true;
        return mi;
    };
    // Disable interaction: tooltips will not open.
    mi.remove = function() {
        on = false;
        return mi;
    };
    // Wire tooltip open/close handlers onto a single marker.
    mi.bindMarker = function(marker) {
        // Close tooltips after a short grace period, unless the marker
        // was explicitly clicked open.
        var delayed_close = function() {
            if (showOnHover === false) return;
            if (!marker.clicked) close_timer = window.setTimeout(function() {
                mi.hideTooltips();
            }, 200);
        };
        var show = function(e) {
            if (e && e.type == 'mouseover' && showOnHover === false) return;
            if (!on) return;
            var content = formatter(marker.data);
            // Don't show a popup if the formatter returns an
            // empty string. This does not do any magic around DOM elements.
            if (!content) return;
            if (exclusive && tooltips.length > 0) {
                mi.hideTooltips();
                // We've hidden all of the tooltips, so let's not close
                // the one that we're creating as soon as it is created.
                if (close_timer) window.clearTimeout(close_timer);
            }
            var tooltip = document.createElement('div');
            tooltip.className = 'marker-tooltip';
            tooltip.style.width = '100%';
            var wrapper = tooltip.appendChild(document.createElement('div'));
            wrapper.style.cssText = 'position: absolute; pointer-events: none;';
            var popup = wrapper.appendChild(document.createElement('div'));
            popup.className = 'marker-popup';
            popup.style.cssText = 'pointer-events: auto;';
            if (typeof content == 'string') {
                popup.innerHTML = content;
            } else {
                popup.appendChild(content);
            }
            // Align the bottom of the tooltip with the top of its marker
            wrapper.style.bottom = marker.element.offsetHeight / 2 + 20 + 'px';
            // Block mouse and touch events from falling through to the map
            function stopPropagation(e) {
                e.cancelBubble = true;
                if (e.stopPropagation) { e.stopPropagation(); }
                return false;
            }
            MM.addEvent(popup, 'mousedown', stopPropagation);
            MM.addEvent(popup, 'touchstart', stopPropagation);
            if (showOnHover) {
                // Hovering over the tooltip itself cancels the pending close.
                tooltip.onmouseover = function() {
                    if (close_timer) window.clearTimeout(close_timer);
                };
                tooltip.onmouseout = delayed_close;
            }
            // Tooltips are implemented as plain, non-interactive markers
            // pinned to the same location as the marker they describe.
            var t = {
                element: tooltip,
                data: {},
                interactive: false,
                location: marker.location.copy()
            };
            tooltips.push(t);
            marker.tooltip = t;
            mmg.add(t);
            mmg.draw();
        };
        marker.showTooltip = show;
        marker.element.onclick = marker.element.ontouchstart = function() {
            show();
            marker.clicked = true;
        };
        marker.element.onmouseover = show;
        marker.element.onmouseout = delayed_close;
    };
    // Hide open tooltips when the map pans (if hideOnMove is set). The
    // map object is only reachable after the layer has been drawn.
    function bindPanned() {
        mmg.map.addCallback('panned', function() {
            if (hideOnMove) {
                while (tooltips.length) {
                    mmg.remove(tooltips.pop());
                }
            }
        });
    }
    if (mmg) {
        // Remove tooltips on panning
        mmg.addCallback('drawn', bindPanned);
        // Bind present markers
        var markers = mmg.markers();
        for (var i = 0; i < markers.length; i++) {
            mi.bindMarker(markers[i]);
        }
        // Bind future markers
        mmg.addCallback('markeradded', function(_, marker) {
            // Markers can choose to be not-interactive. The main example
            // of this currently is marker bubbles, which should not recursively
            // give marker bubbles.
            if (marker.interactive !== false) mi.bindMarker(marker);
        });
        // Save reference to self on the markers instance.
        mmg.interaction = mi;
    }
    return mi;
};
// Legacy global alias: older code referenced the interaction factory
// as `mmg_interaction`.
mmg_interaction = mapbox.markers.interaction;
mapbox.markers.csv_to_geojson = function(x) {
    // Convert a CSV string into an array of GeoJSON Point features.
    // The first header starting with "lat"/"lon" (case-insensitive)
    // supplies the coordinates; every parsed column becomes a feature
    // property. Throws a string error if either coordinate column is
    // missing.
    // Extracted from d3
    function csv_parse(text) {
        var header;
        return csv_parseRows(text, function(row, i) {
            if (i) {
                // Data row: zip values with the header into an object.
                var o = {}, j = -1, m = header.length;
                while (++j < m) o[header[j]] = row[j];
                return o;
            } else {
                // First row is the header; returning null drops it from
                // the output.
                header = row;
                return null;
            }
        });
    }
    function csv_parseRows (text, f) {
        var EOL = {}, // sentinel value for end-of-line
            EOF = {}, // sentinel value for end-of-file
            rows = [], // output rows
            re = /\r\n|[,\r\n]/g, // field separator regex
            n = 0, // the current line number
            t, // the current token
            eol; // is the current token followed by EOL?
        re.lastIndex = 0; // work-around bug in FF 3.6
        /** @private Returns the next token. */
        function token() {
            if (re.lastIndex >= text.length) return EOF; // special case: end of file
            if (eol) { eol = false; return EOL; } // special case: end of line
            // special case: quotes
            var j = re.lastIndex;
            if (text.charCodeAt(j) === 34) {
                var i = j;
                while (i++ < text.length) {
                    if (text.charCodeAt(i) === 34) {
                        if (text.charCodeAt(i + 1) !== 34) break;
                        i++;
                    }
                }
                re.lastIndex = i + 2;
                var c = text.charCodeAt(i + 1);
                if (c === 13) {
                    eol = true;
                    if (text.charCodeAt(i + 2) === 10) re.lastIndex++;
                } else if (c === 10) {
                    eol = true;
                }
                // Unescape doubled quotes inside a quoted field.
                return text.substring(j + 1, i).replace(/""/g, "\"");
            }
            // common case
            var m = re.exec(text);
            if (m) {
                eol = m[0].charCodeAt(0) !== 44;
                return text.substring(j, m.index);
            }
            re.lastIndex = text.length;
            return text.substring(j);
        }
        while ((t = token()) !== EOF) {
            var a = [];
            while ((t !== EOL) && (t !== EOF)) {
                a.push(t);
                t = token();
            }
            if (f && !(a = f(a, n++))) continue;
            rows.push(a);
        }
        return rows;
    }
    var features = [];
    var parsed = csv_parse(x);
    if (!parsed.length) return features;
    var latfield = '',
        lonfield = '';
    // Find the coordinate columns by prefix, case-insensitively.
    for (var f in parsed[0]) {
        if (f.match(/^Lat/i)) latfield = f;
        if (f.match(/^Lon/i)) lonfield = f;
    }
    if (!latfield || !lonfield) {
        throw 'CSV: Could not find latitude or longitude field';
    }
    for (var i = 0; i < parsed.length; i++) {
        // BUGFIX: the original tested the longitude field twice and
        // never the latitude field, so rows missing a latitude slipped
        // through and produced NaN coordinates. Test both fields.
        if (parsed[i][lonfield] !== undefined &&
            parsed[i][latfield] !== undefined) {
            features.push({
                type: 'Feature',
                properties: parsed[i],
                geometry: {
                    type: 'Point',
                    // GeoJSON coordinate order is [longitude, latitude].
                    coordinates: [
                        parseFloat(parsed[i][lonfield]),
                        parseFloat(parsed[i][latfield])]
                }
            });
        }
    }
    return features;
};
mapbox.markers.simplestyle_factory = function(feature) {
    // Build an <img> marker element from a feature's simplestyle
    // properties (marker-size, marker-symbol, marker-color).
    // Pixel dimensions [width, height] for each size keyword.
    var dims = {
        small: [20, 50],
        medium: [30, 70],
        large: [35, 90]
    };
    var props = feature.properties || {};
    var sizeName = props['marker-size'] || 'medium';
    var symbolPart = (props['marker-symbol']) ? '-' + props['marker-symbol'] : '';
    var hexColor = (props['marker-color'] || '7e7e7e').replace('#', '');
    // Support retina markers for 2x devices
    var retinaSuffix = (window.devicePixelRatio === 2) ? '@2x' : '';
    var img = document.createElement('img');
    img.width = dims[sizeName][0];
    img.height = dims[sizeName][1];
    img.className = 'simplestyle-marker';
    img.alt = props.title || '';
    img.src = (mapbox.markers.marker_baseurl || 'http://a.tiles.mapbox.com/v3/marker/') +
        'pin-' +
        // Internet Explorer does not support the `size[0]` syntax.
        sizeName.charAt(0) + symbolPart + '+' + hexColor +
        retinaSuffix +
        '.png';
    var css = img.style;
    css.position = 'absolute';
    // Clip the bottom quarter so the pin's shadow doesn't catch clicks.
    css.clip = 'rect(auto auto ' + (dims[sizeName][1] * 0.75) + 'px auto)';
    // Center the image horizontally and vertically on its location.
    css.marginTop = -((dims[sizeName][1]) / 2) + 'px';
    css.marginLeft = -(dims[sizeName][0] / 2) + 'px';
    css.cursor = 'pointer';
    css.pointerEvents = 'all';
    return img;
};
| {'content_hash': '66eaa0804b7574dba0515c7e8ef35321', 'timestamp': '', 'source': 'github', 'line_count': 649, 'max_line_length': 97, 'avg_line_length': 31.751926040061633, 'alnum_prop': 0.5031300043674479, 'repo_name': 'jlord/pagination-test', 'id': '11cb478b978f09cb4ff63d31dd7787f5ba02dd2b', 'size': '20607', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'node_modules/mapbox.js/node_modules/markers/dist/markers.js', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'CSS', 'bytes': '6595'}, {'name': 'JavaScript', 'bytes': '57368'}]} |
/* global __NEXT_DATA__ */
import { parse, format } from 'url'
import EventEmitter from '../EventEmitter'
import shallowEquals from '../shallow-equals'
import PQueue from '../p-queue'
import { loadGetInitialProps, getURL } from '../utils'
import { _notifyBuildIdMismatch, _rewriteUrlForNextExport } from './'
/**
 * Client-side router: keeps a per-route cache of components and props,
 * performs pushState/replaceState navigation, runs getInitialProps, and
 * emits routeChange* events that subscribers listen to.
 */
export default class Router {
  constructor (pathname, query, as, { pageLoader, Component, ErrorComponent, err } = {}) {
    // represents the current component key
    this.route = toRoute(pathname)
    // set up the component cache (by route keys)
    this.components = {}
    // We should not keep the cache if there's an error.
    // Otherwise, this causes issues when going back and
    // coming again to the errored page.
    if (Component !== ErrorComponent) {
      this.components[this.route] = { Component, err }
    }
    // Handling Router Events
    this.events = new EventEmitter()
    this.pageLoader = pageLoader
    // Limit concurrent prefetches so they don't saturate the network.
    this.prefetchQueue = new PQueue({ concurrency: 2 })
    this.ErrorComponent = ErrorComponent
    this.pathname = pathname
    this.query = query
    this.asPath = as
    this.subscriptions = new Set()
    // Cancellation hook for the in-flight component/props load, if any.
    this.componentLoadCancel = null
    this.onPopState = this.onPopState.bind(this)
    if (typeof window !== 'undefined') {
      // in order for `e.state` to work on the `onpopstate` event
      // we have to register the initial route upon initialization
      this.changeState('replaceState', format({ pathname, query }), getURL())
      window.addEventListener('popstate', this.onPopState)
    }
  }
  // Handles browser back/forward: restores the route recorded in
  // history state via replace().
  async onPopState (e) {
    if (!e.state) {
      // We get state as undefined for two reasons.
      // 1. With older safari (< 8) and older chrome (< 34)
      // 2. When the URL changed with #
      //
      // In the both cases, we don't need to proceed and change the route.
      // (as it's already changed)
      // But we can simply replace the state with the new changes.
      // Actually, for (1) we don't need to do anything. But it's hard to detect that event.
      // So, doing the following for (1) does no harm.
      const { pathname, query } = this
      this.changeState('replaceState', format({ pathname, query }), getURL())
      return
    }
    const { url, as, options } = e.state
    this.replace(url, as, options)
  }
  // Replace the cached Component for `route` (used by HMR) and
  // re-render if it's the route currently on screen.
  update (route, Component) {
    const data = this.components[route]
    if (!data) {
      throw new Error(`Cannot update unavailable route: ${route}`)
    }
    const newData = { ...data, Component }
    this.components[route] = newData
    if (route === this.route) {
      this.notify(newData)
    }
  }
  // Drop the cache for `route`; if it's the current route, re-fetch the
  // component and its props and re-render, emitting routeChange* events.
  async reload (route) {
    delete this.components[route]
    this.pageLoader.clearCache(route)
    if (route !== this.route) return
    const { pathname, query } = this
    const url = window.location.href
    this.events.emit('routeChangeStart', url)
    const routeInfo = await this.getRouteInfo(route, pathname, query, url)
    const { error } = routeInfo
    if (error && error.cancelled) {
      return
    }
    this.notify(routeInfo)
    if (error) {
      this.events.emit('routeChangeError', error, url)
      throw error
    }
    this.events.emit('routeChangeComplete', url)
  }
  // Go back one entry in the browser history.
  back () {
    window.history.back()
  }
  // Navigate, adding a new history entry.
  push (url, as = url, options = {}) {
    return this.change('pushState', url, as, options)
  }
  // Navigate, replacing the current history entry.
  replace (url, as = url, options = {}) {
    return this.change('replaceState', url, as, options)
  }
  // Core navigation: resolves the route, loads component + props,
  // updates history state, and emits routeChange* events.
  // Returns true on success, false when the change was cancelled.
  async change (method, _url, _as, options) {
    // If url and as provided as an object representation,
    // we'll format them into the string version here.
    const url = typeof _url === 'object' ? format(_url) : _url
    let as = typeof _as === 'object' ? format(_as) : _as
    // Add the ending slash to the paths. So, we can serve the
    // "<page>/index.html" directly for the SSR page.
    if (__NEXT_DATA__.nextExport) {
      as = _rewriteUrlForNextExport(as)
    }
    this.abortComponentLoad(as)
    const { pathname, query } = parse(url, true)
    // If the url change is only related to a hash change
    // We should not proceed. We should only change the state.
    if (this.onlyAHashChange(as)) {
      this.changeState(method, url, as)
      this.scrollToHash(as)
      return
    }
    // If asked to change the current URL we should reload the current page
    // (not location.reload() but reload getInitialProps and other Next.js stuffs)
    // We also need to set the method = replaceState always
    // as this should not go into the history (That's how browsers work)
    if (!this.urlIsNew(pathname, query)) {
      method = 'replaceState'
    }
    const route = toRoute(pathname)
    const { shallow = false } = options
    let routeInfo = null
    this.events.emit('routeChangeStart', as)
    // If shallow === true and other conditions are met, we reuse the
    // existing routeInfo for this route.
    // Because of this, getInitialProps would not run.
    if (shallow && this.isShallowRoutingPossible(route)) {
      routeInfo = this.components[route]
    } else {
      routeInfo = await this.getRouteInfo(route, pathname, query, as)
    }
    const { error } = routeInfo
    if (error && error.cancelled) {
      return false
    }
    this.events.emit('beforeHistoryChange', as)
    this.changeState(method, url, as, options)
    const hash = window.location.hash.substring(1)
    this.set(route, pathname, query, as, { ...routeInfo, hash })
    if (error) {
      this.events.emit('routeChangeError', error, as)
      throw error
    }
    this.events.emit('routeChangeComplete', as)
    return true
  }
  // Write the navigation into window.history; skipped for a pushState
  // to the URL we're already on, to avoid duplicate history entries.
  changeState (method, url, as, options = {}) {
    if (method !== 'pushState' || getURL() !== as) {
      window.history[method]({ url, as, options }, null, as)
    }
  }
  // Resolve { Component, props, err? } for a route, caching on success.
  // Load/props failures fall back to rendering ErrorComponent; a
  // cancelled or build-id-mismatched load returns { error } only.
  async getRouteInfo (route, pathname, query, as) {
    let routeInfo = null
    try {
      routeInfo = this.components[route]
      if (!routeInfo) {
        routeInfo = { Component: await this.fetchComponent(route, as) }
      }
      const { Component } = routeInfo
      const ctx = { pathname, query, asPath: as }
      routeInfo.props = await this.getInitialProps(Component, ctx)
      this.components[route] = routeInfo
    } catch (err) {
      if (err.cancelled) {
        return { error: err }
      }
      if (err.buildIdMismatched) {
        // Now we need to reload the page or do the action asked by the user
        _notifyBuildIdMismatch(as)
        // We also need to cancel this current route change.
        // We do it like this.
        err.cancelled = true
        return { error: err }
      }
      if (err.statusCode === 404) {
        // Indicate main error display logic to
        // ignore rendering this error as a runtime error.
        err.ignore = true
      }
      const Component = this.ErrorComponent
      routeInfo = { Component, err }
      const ctx = { err, pathname, query }
      routeInfo.props = await this.getInitialProps(Component, ctx)
      routeInfo.error = err
    }
    return routeInfo
  }
  // Commit the new route to the router's own state and notify subscribers.
  set (route, pathname, query, as, data) {
    this.route = route
    this.pathname = pathname
    this.query = query
    this.asPath = as
    this.notify(data)
  }
  // True when navigating from the current asPath to `as` changes only
  // the URL fragment (hash).
  onlyAHashChange (as) {
    if (!this.asPath) return false
    const [ oldUrlNoHash, oldHash ] = this.asPath.split('#')
    const [ newUrlNoHash, newHash ] = as.split('#')
    // If the URLs (ignoring the hash) differ, there's more than a hash change
    if (oldUrlNoHash !== newUrlNoHash) {
      return false
    }
    // If the hash has changed, then it's a hash only change.
    // This check is necessary to handle both the enter and
    // leave hash === '' cases. The identity case falls through
    // and is treated as a next reload.
    return oldHash !== newHash
  }
  // Scroll the element whose id matches the URL hash into view, if any.
  scrollToHash (as) {
    const [ , hash ] = as.split('#')
    const el = document.getElementById(hash)
    if (el) {
      el.scrollIntoView()
    }
  }
  // True when pathname or query differs from the current route.
  urlIsNew (pathname, query) {
    return this.pathname !== pathname || !shallowEquals(query, this.query)
  }
  isShallowRoutingPossible (route) {
    return (
      // If there's cached routeInfo for the route.
      Boolean(this.components[route]) &&
      // If the route is already rendered on the screen.
      this.route === route
    )
  }
  // Queue a background load of the component for `url`.
  async prefetch (url) {
    // We don't add support for prefetch in the development mode.
    // If we do that, our on-demand-entries optimization won't perform well
    if (process.env.NODE_ENV === 'development') return
    const { pathname } = parse(url)
    const route = toRoute(pathname)
    return this.prefetchQueue.add(() => this.fetchRoute(route))
  }
  // Load a route's component, supporting cancellation via
  // abortComponentLoad(); on load failure, fall back to a full
  // server-side navigation to `as`.
  async fetchComponent (route, as) {
    let cancelled = false
    const cancel = this.componentLoadCancel = function () {
      cancelled = true
    }
    try {
      const Component = await this.fetchRoute(route)
      if (cancelled) {
        const error = new Error(`Abort fetching component for route: "${route}"`)
        error.cancelled = true
        throw error
      }
      if (cancel === this.componentLoadCancel) {
        this.componentLoadCancel = null
      }
      return Component
    } catch (err) {
      // There's an error in loading the route.
      // Usually this happens when there's a failure in the webpack build
      // So in that case, we need to load the page with full SSR
      // That'll clean the invalid existing client side information.
      // (Like cached routes)
      window.location.href = as
      throw err
    }
  }
  // Run a component's getInitialProps with cancellation support; throws
  // an error flagged `cancelled` if aborted while the props were loading.
  async getInitialProps (Component, ctx) {
    let cancelled = false
    const cancel = () => { cancelled = true }
    this.componentLoadCancel = cancel
    const props = await loadGetInitialProps(Component, ctx)
    if (cancel === this.componentLoadCancel) {
      this.componentLoadCancel = null
    }
    if (cancelled) {
      const err = new Error('Loading initial props cancelled')
      err.cancelled = true
      throw err
    }
    return props
  }
  // Delegate the actual page/component fetch to the page loader.
  async fetchRoute (route) {
    return await this.pageLoader.loadPage(route)
  }
  // Cancel any in-flight component/props load and emit routeChangeError.
  abortComponentLoad (as) {
    if (this.componentLoadCancel) {
      this.events.emit('routeChangeError', new Error('Route Cancelled'), as)
      this.componentLoadCancel()
      this.componentLoadCancel = null
    }
  }
  // Push new route data to every subscriber (the rendering layer).
  notify (data) {
    this.subscriptions.forEach((fn) => fn(data))
  }
  // Register a subscriber; returns an unsubscribe function.
  subscribe (fn) {
    this.subscriptions.add(fn)
    return () => this.subscriptions.delete(fn)
  }
}
// Normalize a pathname into a route key: strip a single trailing slash,
// and map the bare root ('' after stripping) back to '/'.
function toRoute (path) {
  const stripped = path.replace(/\/$/, '')
  return stripped === '' ? '/' : stripped
}
| {'content_hash': 'd01f9e83a6862acfae6a5e832b78decd', 'timestamp': '', 'source': 'github', 'line_count': 368, 'max_line_length': 90, 'avg_line_length': 28.61413043478261, 'alnum_prop': 0.6298195631528964, 'repo_name': 'nelak/next.js', 'id': '1873002d5a495e70b6c5b13b11c666b7bf431601', 'size': '10530', 'binary': False, 'copies': '3', 'ref': 'refs/heads/v3-beta', 'path': 'lib/router/router.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '237649'}]} |
'use strict';
var express = require('express')
var router = require('./router');
module.exports = function(app) {
app.use(router);
require('./main');
};
| {'content_hash': '7593c85bb1449b509993ee4c9d1c94bf', 'timestamp': '', 'source': 'github', 'line_count': 10, 'max_line_length': 34, 'avg_line_length': 16.1, 'alnum_prop': 0.6335403726708074, 'repo_name': 'elliotf/example-node-app', 'id': '74ba546cc031ab09e66d4218cbbe8da998896b36', 'size': '161', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'routes/index.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '17'}, {'name': 'JavaScript', 'bytes': '5111'}, {'name': 'Makefile', 'bytes': '167'}]} |
package features
import (
"k8s.io/apimachinery/pkg/util/runtime"
genericfeatures "k8s.io/apiserver/pkg/features"
utilfeature "k8s.io/apiserver/pkg/util/feature"
"k8s.io/component-base/featuregate"
)
const (
// Every feature gate should add method here following this template:
//
// // owner: @username
// // kep: http://kep.k8s.io/NNN
// // alpha: v1.X
// MyFeature featuregate.Feature = "MyFeature"
// owner: @tallclair
// beta: v1.4
AppArmor featuregate.Feature = "AppArmor"
// owner: @mtaufen
// alpha: v1.4
// beta: v1.11
// deprecated: 1.22
DynamicKubeletConfig featuregate.Feature = "DynamicKubeletConfig"
// owner: @pweil-
// alpha: v1.5
//
// Default userns=host for containers that are using other host namespaces, host mounts, the pod
// contains a privileged container, or specific non-namespaced capabilities (MKNOD, SYS_MODULE,
// SYS_TIME). This should only be enabled if user namespace remapping is enabled in the docker daemon.
ExperimentalHostUserNamespaceDefaultingGate featuregate.Feature = "ExperimentalHostUserNamespaceDefaulting"
// owner: @jiayingz
// beta: v1.10
//
// Enables support for Device Plugins
DevicePlugins featuregate.Feature = "DevicePlugins"
// owner: @dxist
// alpha: v1.16
//
// Enables support of HPA scaling to zero pods when an object or custom metric is configured.
HPAScaleToZero featuregate.Feature = "HPAScaleToZero"
// owner: @mikedanese
// alpha: v1.7
// beta: v1.12
//
// Gets a server certificate for the kubelet from the Certificate Signing
// Request API instead of generating one self signed and auto rotates the
// certificate as expiration approaches.
RotateKubeletServerCertificate featuregate.Feature = "RotateKubeletServerCertificate"
// owner: @jinxu
// beta: v1.10
//
// New local storage types to support local storage capacity isolation
LocalStorageCapacityIsolation featuregate.Feature = "LocalStorageCapacityIsolation"
// owner: @gnufied
// beta: v1.11
// Ability to Expand persistent volumes
ExpandPersistentVolumes featuregate.Feature = "ExpandPersistentVolumes"
// owner: @mlmhl
// beta: v1.15
// Ability to expand persistent volumes' file system without unmounting volumes.
ExpandInUsePersistentVolumes featuregate.Feature = "ExpandInUsePersistentVolumes"
// owner: @gnufied
// alpha: v1.14
// beta: v1.16
// Ability to expand CSI volumes
ExpandCSIVolumes featuregate.Feature = "ExpandCSIVolumes"
// owner: @verb
// alpha: v1.16
//
// Allows running an ephemeral container in pod namespaces to troubleshoot a running pod.
EphemeralContainers featuregate.Feature = "EphemeralContainers"
// owner: @sjenning
// alpha: v1.11
//
// Allows resource reservations at the QoS level preventing pods at lower QoS levels from
// bursting into resources requested at higher QoS levels (memory only for now)
QOSReserved featuregate.Feature = "QOSReserved"
// owner: @ConnorDoyle
// alpha: v1.8
// beta: v1.10
//
// Alternative container-level CPU affinity policies.
CPUManager featuregate.Feature = "CPUManager"
// owner: @szuecs
// alpha: v1.12
//
// Enable nodes to change CPUCFSQuotaPeriod
CPUCFSQuotaPeriod featuregate.Feature = "CustomCPUCFSQuotaPeriod"
// owner: @lmdaly
// alpha: v1.16
// beta: v1.18
//
// Enable resource managers to make NUMA aligned decisions
TopologyManager featuregate.Feature = "TopologyManager"
// owner: @cynepco3hahue(alukiano) @cezaryzukowski @k-wiatrzyk
// alpha: v1.21
// beta: v1.22
// Allows setting memory affinity for a container based on NUMA topology
MemoryManager featuregate.Feature = "MemoryManager"
// owner: @sjenning
// alpha: v1.4
// beta: v1.11
// ga: v1.21
//
// Enable pods to set sysctls on a pod
Sysctls featuregate.Feature = "Sysctls"
// owner: @pospispa
// GA: v1.11
//
// Postpone deletion of a PV or a PVC when they are being used
StorageObjectInUseProtection featuregate.Feature = "StorageObjectInUseProtection"
// owner: @dims, @derekwaynecarr
// alpha: v1.10
// beta: v1.14
// GA: v1.20
//
// Implement support for limiting pids in pods
SupportPodPidsLimit featuregate.Feature = "SupportPodPidsLimit"
// owner: @mikedanese
// alpha: v1.13
// beta: v1.21
// ga: v1.22
//
// Migrate ServiceAccount volumes to use a projected volume consisting of a
// ServiceAccountTokenVolumeProjection. This feature adds new required flags
// to the API server.
BoundServiceAccountTokenVolume featuregate.Feature = "BoundServiceAccountTokenVolume"
// owner: @mtaufen
// alpha: v1.18
// beta: v1.20
// stable: v1.21
//
// Enable OIDC discovery endpoints (issuer and JWKS URLs) for the service
// account issuer in the API server.
// Note these endpoints serve minimally-compliant discovery docs that are
// intended to be used for service account token verification.
ServiceAccountIssuerDiscovery featuregate.Feature = "ServiceAccountIssuerDiscovery"
// owner: @saad-ali
// ga: v1.10
//
// Allow mounting a subpath of a volume in a container
// Do not remove this feature gate even though it's GA
VolumeSubpath featuregate.Feature = "VolumeSubpath"
// owner: @pohly
// alpha: v1.14
// beta: v1.16
//
// Enables CSI Inline volumes support for pods
CSIInlineVolume featuregate.Feature = "CSIInlineVolume"
// owner: @pohly
// alpha: v1.19
// beta: v1.21
//
// Enables tracking of available storage capacity that CSI drivers provide.
CSIStorageCapacity featuregate.Feature = "CSIStorageCapacity"
// owner: @alculquicondor
// beta: v1.20
//
// Enables the use of PodTopologySpread scheduling plugin to do default
// spreading and disables legacy SelectorSpread plugin.
DefaultPodTopologySpread featuregate.Feature = "DefaultPodTopologySpread"
// owner: @pohly
// alpha: v1.19
// beta: v1.21
//
// Enables generic ephemeral inline volume support for pods
GenericEphemeralVolume featuregate.Feature = "GenericEphemeralVolume"
// owner: @chendave
// alpha: v1.21
// beta: v1.22
//
// PreferNominatedNode tells scheduler whether the nominated node will be checked first before looping
// all the rest of nodes in the cluster.
// Enabling this feature also implies the preemptor pod might not be dispatched to the best candidate in
// some corner case, e.g. another node releases enough resources after the nominated node has been set
// and hence is the best candidate instead.
PreferNominatedNode featuregate.Feature = "PreferNominatedNode"
// owner: @tallclair
// alpha: v1.12
// beta: v1.14
// GA: v1.20
//
// Enables RuntimeClass, for selecting between multiple runtimes to run a pod.
RuntimeClass featuregate.Feature = "RuntimeClass"
// owner: @mtaufen
// alpha: v1.12
// beta: v1.14
// GA: v1.17
//
// Kubelet uses the new Lease API to report node heartbeats,
// (Kube) Node Lifecycle Controller uses these heartbeats as a node health signal.
NodeLease featuregate.Feature = "NodeLease"
// owner: @rikatz
// kep: http://kep.k8s.io/2079
// alpha: v1.21
// beta: v1.22
//
// Enables the endPort field in NetworkPolicy to enable a Port Range behavior in Network Policies.
NetworkPolicyEndPort featuregate.Feature = "NetworkPolicyEndPort"
// owner: @jessfraz
// alpha: v1.12
//
// Enables control over ProcMountType for containers.
ProcMountType featuregate.Feature = "ProcMountType"
// owner: @janetkuo
// alpha: v1.12
//
// Allow TTL controller to clean up Pods and Jobs after they finish.
TTLAfterFinished featuregate.Feature = "TTLAfterFinished"
// owner: @alculquicondor
// alpha: v1.21
// beta: v1.22
//
// Allows Job controller to manage Pod completions per completion index.
IndexedJob featuregate.Feature = "IndexedJob"
// owner: @alculquicondor
// alpha: v1.22
//
// Track Job completion without relying on Pod remaining in the cluster
// indefinitely. Pod finalizers, in addition to a field in the Job status
// allow the Job controller to keep track of Pods that it didn't account for
// yet.
JobTrackingWithFinalizers featuregate.Feature = "JobTrackingWithFinalizers"
// owner: @dashpole
// alpha: v1.13
// beta: v1.15
//
// Enables the kubelet's pod resources grpc endpoint
KubeletPodResources featuregate.Feature = "KubeletPodResources"
// owner: @davidz627
// alpha: v1.14
// beta: v1.17
//
// Enables the in-tree storage to CSI Plugin migration feature.
CSIMigration featuregate.Feature = "CSIMigration"
// owner: @davidz627
// alpha: v1.14
// beta: v1.17
//
// Enables the GCE PD in-tree driver to GCE CSI Driver migration feature.
CSIMigrationGCE featuregate.Feature = "CSIMigrationGCE"
// owner: @Jiawei0227
// alpha: v1.21
//
// Disables the GCE PD in-tree driver.
InTreePluginGCEUnregister featuregate.Feature = "InTreePluginGCEUnregister"
// owner: @leakingtapan
// alpha: v1.14
// beta: v1.17
//
// Enables the AWS EBS in-tree driver to AWS EBS CSI Driver migration feature.
CSIMigrationAWS featuregate.Feature = "CSIMigrationAWS"
// owner: @leakingtapan
// alpha: v1.21
//
// Disables the AWS EBS in-tree driver.
InTreePluginAWSUnregister featuregate.Feature = "InTreePluginAWSUnregister"
// owner: @andyzhangx
// alpha: v1.15
// beta: v1.19
//
// Enables the Azure Disk in-tree driver to Azure Disk Driver migration feature.
CSIMigrationAzureDisk featuregate.Feature = "CSIMigrationAzureDisk"
// owner: @andyzhangx
// alpha: v1.21
//
// Disables the Azure Disk in-tree driver.
InTreePluginAzureDiskUnregister featuregate.Feature = "InTreePluginAzureDiskUnregister"
// owner: @andyzhangx
// alpha: v1.15
// beta: v1.21
//
// Enables the Azure File in-tree driver to Azure File Driver migration feature.
CSIMigrationAzureFile featuregate.Feature = "CSIMigrationAzureFile"
// owner: @andyzhangx
// alpha: v1.21
//
// Disables the Azure File in-tree driver.
InTreePluginAzureFileUnregister featuregate.Feature = "InTreePluginAzureFileUnregister"
// owner: @divyenpatel
// beta: v1.19 (requires: vSphere vCenter/ESXi Version: 7.0u1, HW Version: VM version 15)
//
// Enables the vSphere in-tree driver to vSphere CSI Driver migration feature.
CSIMigrationvSphere featuregate.Feature = "CSIMigrationvSphere"
// owner: @divyenpatel
// alpha: v1.21
//
// Disables the vSphere in-tree driver.
InTreePluginvSphereUnregister featuregate.Feature = "InTreePluginvSphereUnregister"
// owner: @adisky
// alpha: v1.14
// beta: v1.18
//
// Enables the OpenStack Cinder in-tree driver to OpenStack Cinder CSI Driver migration feature.
CSIMigrationOpenStack featuregate.Feature = "CSIMigrationOpenStack"
// owner: @adisky
// alpha: v1.21
//
// Disables the OpenStack Cinder in-tree driver.
InTreePluginOpenStackUnregister featuregate.Feature = "InTreePluginOpenStackUnregister"
// owner: @huffmanca
// alpha: v1.19
// beta: v1.20
//
// Determines if a CSI Driver supports applying fsGroup.
CSIVolumeFSGroupPolicy featuregate.Feature = "CSIVolumeFSGroupPolicy"
// owner: @gnufied
// alpha: v1.18
// beta: v1.20
// Allows user to configure volume permission change policy for fsGroups when mounting
// a volume in a Pod.
ConfigurableFSGroupPolicy featuregate.Feature = "ConfigurableFSGroupPolicy"
// owner: @gnufied, @verult
// alpha: v1.22
// If supported by the CSI driver, delegates the role of applying FSGroup to
// the driver by passing FSGroup through the NodeStageVolume and
// NodePublishVolume calls.
DelegateFSGroupToCSIDriver featuregate.Feature = "DelegateFSGroupToCSIDriver"
// owner: @RobertKrawitz, @derekwaynecarr
// beta: v1.15
// GA: v1.20
//
// Implement support for limiting pids in nodes
SupportNodePidsLimit featuregate.Feature = "SupportNodePidsLimit"
// owner: @RobertKrawitz
// alpha: v1.15
//
// Allow use of filesystems for ephemeral storage monitoring.
// Only applies if LocalStorageCapacityIsolation is set.
LocalStorageCapacityIsolationFSQuotaMonitoring featuregate.Feature = "LocalStorageCapacityIsolationFSQuotaMonitoring"
// owner: @denkensk
// alpha: v1.15
// beta: v1.19
//
// Enables NonPreempting option for priorityClass and pod.
NonPreemptingPriority featuregate.Feature = "NonPreemptingPriority"
// owner: @egernst
// alpha: v1.16
// beta: v1.18
//
// Enables PodOverhead, for accounting pod overheads which are specific to a given RuntimeClass
PodOverhead featuregate.Feature = "PodOverhead"
// owner: @khenidak
// kep: http://kep.k8s.io/563
// alpha: v1.15
// beta: v1.21
//
// Enables ipv6 dual stack
IPv6DualStack featuregate.Feature = "IPv6DualStack"
// owner: @robscott @freehan
// kep: http://kep.k8s.io/752
// alpha: v1.16
// beta: v1.18
// ga: v1.21
//
// Enable Endpoint Slices for more scalable Service endpoints.
EndpointSlice featuregate.Feature = "EndpointSlice"
// owner: @robscott @freehan
// kep: http://kep.k8s.io/752
// alpha: v1.18
// beta: v1.19
// ga: v1.22
//
// Enable Endpoint Slice consumption by kube-proxy for improved scalability.
EndpointSliceProxying featuregate.Feature = "EndpointSliceProxying"
// owner: @robscott @kumarvin123
// kep: http://kep.k8s.io/752
// alpha: v1.19
// beta: v1.21
// ga: v1.22
//
// Enable Endpoint Slice consumption by kube-proxy in Windows for improved scalability.
WindowsEndpointSliceProxying featuregate.Feature = "WindowsEndpointSliceProxying"
// owner: @matthyx
// alpha: v1.16
// beta: v1.18
// GA: v1.20
//
// Enables the startupProbe in kubelet worker.
StartupProbe featuregate.Feature = "StartupProbe"
// owner: @deads2k
// beta: v1.17
// GA: v1.21
//
// Enables the users to skip TLS verification of kubelets on pod logs requests
AllowInsecureBackendProxy featuregate.Feature = "AllowInsecureBackendProxy"
// owner: @mortent
// alpha: v1.3
// beta: v1.5
//
// Enable all logic related to the PodDisruptionBudget API object in policy
PodDisruptionBudget featuregate.Feature = "PodDisruptionBudget"
// owner: @alaypatel07, @soltysh
// alpha: v1.20
// beta: v1.21
// GA: v1.22
//
// CronJobControllerV2 controls whether the controller manager starts old cronjob
// controller or new one which is implemented with informers and delaying queue
CronJobControllerV2 featuregate.Feature = "CronJobControllerV2"
// owner: @smarterclayton
// alpha: v1.21
// beta: v1.22
// DaemonSets allow workloads to maintain availability during update per node
DaemonSetUpdateSurge featuregate.Feature = "DaemonSetUpdateSurge"
// owner: @wojtek-t
// alpha: v1.18
// beta: v1.19
// ga: v1.21
//
// Enables a feature to make secrets and configmaps data immutable.
ImmutableEphemeralVolumes featuregate.Feature = "ImmutableEphemeralVolumes"
// owner: @bart0sh
// alpha: v1.18
// beta: v1.19
// GA: 1.22
//
// Enables usage of HugePages-<size> in a volume medium,
// e.g. emptyDir:
// medium: HugePages-1Gi
HugePageStorageMediumSize featuregate.Feature = "HugePageStorageMediumSize"
// owner: @derekwaynecarr
// alpha: v1.20
// beta: v1.21 (off by default until 1.22)
//
// Enables usage of hugepages-<size> in downward API.
DownwardAPIHugePages featuregate.Feature = "DownwardAPIHugePages"
// owner: @bswartz
// alpha: v1.18
//
// Enables usage of any object for volume data source in PVCs
AnyVolumeDataSource featuregate.Feature = "AnyVolumeDataSource"
// owner: @javidiaz
// kep: http://kep.k8s.io/1797
// alpha: v1.19
// beta: v1.20
// GA: v1.22
//
// Allow setting the Fully Qualified Domain Name (FQDN) in the hostname of a Pod. If a Pod does not
// have FQDN, this feature has no effect.
SetHostnameAsFQDN featuregate.Feature = "SetHostnameAsFQDN"
// owner: @ksubrmnn
// alpha: v1.14
// beta: v1.20
//
// Allows kube-proxy to run in Overlay mode for Windows
WinOverlay featuregate.Feature = "WinOverlay"
// owner: @ksubrmnn
// alpha: v1.14
//
// Allows kube-proxy to create DSR loadbalancers for Windows
WinDSR featuregate.Feature = "WinDSR"
// owner: @RenaudWasTaken @dashpole
// alpha: v1.19
// beta: v1.20
//
// Disables Accelerator Metrics Collected by Kubelet
DisableAcceleratorUsageMetrics featuregate.Feature = "DisableAcceleratorUsageMetrics"
// owner: @arjunrn @mwielgus @josephburnett
// alpha: v1.20
//
// Add support for the HPA to scale based on metrics from individual containers
// in target pods
HPAContainerMetrics featuregate.Feature = "HPAContainerMetrics"
// owner: @andrewsykim
// kep: http://kep.k8s.io/1672
// alpha: v1.20
//
// Enable Terminating condition in Endpoint Slices.
EndpointSliceTerminatingCondition featuregate.Feature = "EndpointSliceTerminatingCondition"
// owner: @andrewsykim
// kep: http://kep.k8s.io/1669
// alpha: v1.22
//
// Enable kube-proxy to handle terminating endpoints when externalTrafficPolicy=Local
ProxyTerminatingEndpoints featuregate.Feature = "ProxyTerminatingEndpoints"
// owner: @robscott
// kep: http://kep.k8s.io/752
// alpha: v1.20
//
// Enable NodeName field on Endpoint Slices.
EndpointSliceNodeName featuregate.Feature = "EndpointSliceNodeName"
// owner: @derekwaynecarr
// alpha: v1.20
// beta: v1.22
//
// Enables kubelet support to size memory backed volumes
SizeMemoryBackedVolumes featuregate.Feature = "SizeMemoryBackedVolumes"
// owner: @andrewsykim @SergeyKanzhelev
// GA: v1.20
//
// Ensure kubelet respects exec probe timeouts. Feature gate exists in-case existing workloads
// may depend on old behavior where exec probe timeouts were ignored.
// Lock to default and remove after v1.22 based on user feedback that should be reflected in KEP #1972 update
ExecProbeTimeout featuregate.Feature = "ExecProbeTimeout"
// owner: @andrewsykim
// alpha: v1.20
//
// Enable kubelet exec plugins for image pull credentials.
KubeletCredentialProviders featuregate.Feature = "KubeletCredentialProviders"
// owner: @andrewsykim
// alpha: v1.22
//
// Disable any functionality in kube-apiserver, kube-controller-manager and kubelet related to the `--cloud-provider` component flag.
DisableCloudProviders featuregate.Feature = "DisableCloudProviders"
// owner: @zshihang
// alpha: v1.20
// beta: v1.21
// ga: v1.22
//
// Enable kubelet to pass pod's service account token to NodePublishVolume
// call of CSI driver which is mounting volumes for that pod.
CSIServiceAccountToken featuregate.Feature = "CSIServiceAccountToken"
// owner: @bobbypage
// alpha: v1.20
// beta: v1.21
// Adds support for kubelet to detect node shutdown and gracefully terminate pods prior to the node being shutdown.
GracefulNodeShutdown featuregate.Feature = "GracefulNodeShutdown"
// owner: @andrewsykim @uablrek
// kep: http://kep.k8s.io/1864
// alpha: v1.20
// beta: v1.22
//
// Allows control if NodePorts shall be created for services with "type: LoadBalancer" by defining the spec.AllocateLoadBalancerNodePorts field (bool)
ServiceLBNodePortControl featuregate.Feature = "ServiceLBNodePortControl"
// owner: @janosi
// kep: http://kep.k8s.io/1435
// alpha: v1.20
//
// Enables the usage of different protocols in the same Service with type=LoadBalancer
MixedProtocolLBService featuregate.Feature = "MixedProtocolLBService"
// owner: @cofyc
// alpha: v1.21
VolumeCapacityPriority featuregate.Feature = "VolumeCapacityPriority"
// owner: @mattcary
// alpha: v1.22
//
// Enables policies controlling deletion of PVCs created by a StatefulSet.
StatefulSetAutoDeletePVC featuregate.Feature = "StatefulSetAutoDeletePVC"
// owner: @ahg-g
// alpha: v1.21
// beta: v1.22
//
// Enables controlling pod ranking on replicaset scale-down.
PodDeletionCost featuregate.Feature = "PodDeletionCost"
// owner: @robscott
// kep: http://kep.k8s.io/2433
// alpha: v1.21
//
// Enables topology aware hints for EndpointSlices
TopologyAwareHints featuregate.Feature = "TopologyAwareHints"
// owner: @ehashman
// alpha: v1.21
//
// Allows user to override pod-level terminationGracePeriod for probes
ProbeTerminationGracePeriod featuregate.Feature = "ProbeTerminationGracePeriod"
// owner: @ehashman
// alpha: v1.22
//
// Permits kubelet to run with swap enabled
NodeSwapEnabled featuregate.Feature = "NodeSwapEnabled"
// owner: @ahg-g
// alpha: v1.21
// beta: v1.22
//
// Allow specifying NamespaceSelector in PodAffinityTerm.
PodAffinityNamespaceSelector featuregate.Feature = "PodAffinityNamespaceSelector"
// owner: @andrewsykim @XudongLiuHarold
// kep: http://kep.k8s.io/1959
// alpha: v1.21
// beta: v1.22
//
// Enable support multiple Service "type: LoadBalancer" implementations in a cluster by specifying LoadBalancerClass
ServiceLoadBalancerClass featuregate.Feature = "ServiceLoadBalancerClass"
// owner: @damemi
// alpha: v1.21
// beta: v1.22
//
// Enables scaling down replicas via logarithmic comparison of creation/ready timestamps
LogarithmicScaleDown featuregate.Feature = "LogarithmicScaleDown"
// owner: @hbagdi
// kep: http://kep.k8s.io/2365
// alpha: v1.21
// beta: v1.22
//
// Enable Scope and Namespace fields on IngressClassParametersReference.
IngressClassNamespacedParams featuregate.Feature = "IngressClassNamespacedParams"
// owner: @maplain @andrewsykim
// kep: http://kep.k8s.io/2086
// alpha: v1.21
//
// Enables node-local routing for Service internal traffic
ServiceInternalTrafficPolicy featuregate.Feature = "ServiceInternalTrafficPolicy"
// owner: @adtac
// alpha: v1.21
// beta: v1.22
//
// Allows jobs to be created in the suspended state.
SuspendJob featuregate.Feature = "SuspendJob"
// owner: @fromanirh
// alpha: v1.21
//
// Enable POD resources API to return allocatable resources
KubeletPodResourcesGetAllocatable featuregate.Feature = "KubeletPodResourcesGetAllocatable"
// owner: @jayunit100 @abhiraut @rikatz
// kep: http://kep.k8s.io/2161
// beta: v1.21
// ga: v1.22
//
// Labels all namespaces with a default label "kubernetes.io/metadata.name: <namespaceName>"
NamespaceDefaultLabelName featuregate.Feature = "NamespaceDefaultLabelName"
// owner: @fengzixu
// alpha: v1.21
//
// Enables kubelet to detect CSI volume condition and send the event of the abnormal volume to the corresponding pod that is using it.
CSIVolumeHealth featuregate.Feature = "CSIVolumeHealth"
// owner: @marosset
// alpha: v1.22
//
// Enables support for 'HostProcess' containers on Windows nodes.
WindowsHostProcessContainers featuregate.Feature = "WindowsHostProcessContainers"
// owner: @ravig
// alpha: v1.22
//
// StatefulSetMinReadySeconds allows minReadySeconds to be respected by StatefulSet controller
StatefulSetMinReadySeconds featuregate.Feature = "StatefulSetMinReadySeconds"
// owner: @gjkim42
// kep: http://kep.k8s.io/2595
// alpha: v1.22
//
// Enables apiserver and kubelet to allow up to 32 DNSSearchPaths and up to 2048 DNSSearchListChars.
ExpandedDNSConfig featuregate.Feature = "ExpandedDNSConfig"
// owner: @saschagrunert
// alpha: v1.22
//
// Enables the use of `RuntimeDefault` as the default seccomp profile for all workloads.
SeccompDefault featuregate.Feature = "SeccompDefault"
// owner: @liggitt, @tallclair, sig-auth
// alpha: v1.22
//
// Enables the PodSecurity admission plugin
PodSecurity featuregate.Feature = "PodSecurity"
// owner: @chrishenzie
// alpha: v1.22
//
// Enables usage of the ReadWriteOncePod PersistentVolume access mode.
ReadWriteOncePod featuregate.Feature = "ReadWriteOncePod"
// owner: @enj
// beta: v1.22
//
// Allows clients to request a duration for certificates issued via the Kubernetes CSR API.
CSRDuration featuregate.Feature = "CSRDuration"
// owner: @AkihiroSuda
// alpha: v1.22
//
// Enables support for running kubelet in a user namespace.
// The user namespace has to be created before running kubelet.
// All the node components such as CRI need to be running in the same user namespace.
KubeletInUserNamespace featuregate.Feature = "KubeletInUserNamespace"
)
// init registers every Kubernetes-specific feature gate declared in
// defaultKubernetesFeatureGates with the process-wide mutable feature gate
// at package load time. runtime.Must panics if registration fails (for
// example, on a duplicate feature key), surfacing the error at startup.
func init() {
	runtime.Must(utilfeature.DefaultMutableFeatureGate.Add(defaultKubernetesFeatureGates))
}
// defaultKubernetesFeatureGates consists of all known Kubernetes-specific feature keys.
// To add a new feature, define a key for it above and add it here. The features will be
// available throughout Kubernetes binaries.
// Each entry maps a feature key to its default enablement and its maturity
// stage (Alpha/Beta/GA/Deprecated); LockToDefault pins a gate to its default
// value so it can no longer be toggled (see featuregate.FeatureSpec).
var defaultKubernetesFeatureGates = map[featuregate.Feature]featuregate.FeatureSpec{
	AppArmor:             {Default: true, PreRelease: featuregate.Beta},
	DynamicKubeletConfig: {Default: false, PreRelease: featuregate.Deprecated}, // feature gate is deprecated in 1.22, remove no earlier than 1.23
	ExperimentalHostUserNamespaceDefaultingGate: {Default: false, PreRelease: featuregate.Beta},
	DevicePlugins:                  {Default: true, PreRelease: featuregate.Beta},
	RotateKubeletServerCertificate: {Default: true, PreRelease: featuregate.Beta},
	LocalStorageCapacityIsolation:  {Default: true, PreRelease: featuregate.Beta},
	Sysctls:                        {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	EphemeralContainers:            {Default: false, PreRelease: featuregate.Alpha},
	QOSReserved:                    {Default: false, PreRelease: featuregate.Alpha},
	ExpandPersistentVolumes:        {Default: true, PreRelease: featuregate.Beta},
	ExpandInUsePersistentVolumes:   {Default: true, PreRelease: featuregate.Beta},
	ExpandCSIVolumes:               {Default: true, PreRelease: featuregate.Beta},
	CPUManager:                     {Default: true, PreRelease: featuregate.Beta},
	MemoryManager:                  {Default: true, PreRelease: featuregate.Beta},
	CPUCFSQuotaPeriod:              {Default: false, PreRelease: featuregate.Alpha},
	TopologyManager:                {Default: true, PreRelease: featuregate.Beta},
	StorageObjectInUseProtection:   {Default: true, PreRelease: featuregate.GA},
	SupportPodPidsLimit:            {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	SupportNodePidsLimit:           {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	BoundServiceAccountTokenVolume: {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	ServiceAccountIssuerDiscovery:  {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.22
	CSIMigration:                   {Default: true, PreRelease: featuregate.Beta},
	CSIMigrationGCE:                {Default: false, PreRelease: featuregate.Beta}, // Off by default (requires GCE PD CSI Driver)
	InTreePluginGCEUnregister:      {Default: false, PreRelease: featuregate.Alpha},
	CSIMigrationAWS:                {Default: false, PreRelease: featuregate.Beta}, // Off by default (requires AWS EBS CSI driver)
	InTreePluginAWSUnregister:      {Default: false, PreRelease: featuregate.Alpha},
	CSIMigrationAzureDisk:          {Default: false, PreRelease: featuregate.Beta}, // Off by default (requires Azure Disk CSI driver)
	InTreePluginAzureDiskUnregister: {Default: false, PreRelease: featuregate.Alpha},
	CSIMigrationAzureFile:           {Default: false, PreRelease: featuregate.Beta}, // Off by default (requires Azure File CSI driver)
	InTreePluginAzureFileUnregister: {Default: false, PreRelease: featuregate.Alpha},
	CSIMigrationvSphere:             {Default: false, PreRelease: featuregate.Beta}, // Off by default (requires vSphere CSI driver)
	InTreePluginvSphereUnregister:   {Default: false, PreRelease: featuregate.Alpha},
	CSIMigrationOpenStack:           {Default: true, PreRelease: featuregate.Beta},
	InTreePluginOpenStackUnregister: {Default: false, PreRelease: featuregate.Alpha},
	VolumeSubpath:                   {Default: true, PreRelease: featuregate.GA},
	ConfigurableFSGroupPolicy:       {Default: true, PreRelease: featuregate.Beta},
	CSIInlineVolume:                 {Default: true, PreRelease: featuregate.Beta},
	CSIStorageCapacity:              {Default: true, PreRelease: featuregate.Beta},
	CSIServiceAccountToken:          {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	GenericEphemeralVolume:          {Default: true, PreRelease: featuregate.Beta},
	CSIVolumeFSGroupPolicy:          {Default: true, PreRelease: featuregate.Beta},
	RuntimeClass:                    {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	NodeLease:                       {Default: true, PreRelease: featuregate.GA, LockToDefault: true},
	NetworkPolicyEndPort:            {Default: true, PreRelease: featuregate.Beta},
	ProcMountType:                   {Default: false, PreRelease: featuregate.Alpha},
	TTLAfterFinished:                {Default: true, PreRelease: featuregate.Beta},
	IndexedJob:                      {Default: true, PreRelease: featuregate.Beta},
	JobTrackingWithFinalizers:       {Default: false, PreRelease: featuregate.Alpha},
	KubeletPodResources:             {Default: true, PreRelease: featuregate.Beta},
	LocalStorageCapacityIsolationFSQuotaMonitoring: {Default: false, PreRelease: featuregate.Alpha},
	NonPreemptingPriority:             {Default: true, PreRelease: featuregate.Beta},
	PodOverhead:                       {Default: true, PreRelease: featuregate.Beta},
	IPv6DualStack:                     {Default: true, PreRelease: featuregate.Beta},
	EndpointSlice:                     {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.25
	EndpointSliceProxying:             {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.25
	EndpointSliceTerminatingCondition: {Default: false, PreRelease: featuregate.Alpha},
	ProxyTerminatingEndpoints:         {Default: false, PreRelease: featuregate.Alpha},
	EndpointSliceNodeName:             {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.25
	WindowsEndpointSliceProxying:      {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.25
	StartupProbe:                      {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	AllowInsecureBackendProxy:         {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	PodDisruptionBudget:               {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.25
	CronJobControllerV2:               {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	DaemonSetUpdateSurge:              {Default: true, PreRelease: featuregate.Beta}, // on by default in 1.22
	ImmutableEphemeralVolumes:         {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.24
	HugePageStorageMediumSize:         {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.23
	DownwardAPIHugePages:              {Default: false, PreRelease: featuregate.Beta}, // on by default in 1.22
	AnyVolumeDataSource:               {Default: false, PreRelease: featuregate.Alpha},
	DefaultPodTopologySpread:          {Default: true, PreRelease: featuregate.Beta},
	SetHostnameAsFQDN:                 {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.24
	WinOverlay:                        {Default: true, PreRelease: featuregate.Beta},
	WinDSR:                            {Default: false, PreRelease: featuregate.Alpha},
	DisableAcceleratorUsageMetrics:    {Default: true, PreRelease: featuregate.Beta},
	HPAContainerMetrics:               {Default: false, PreRelease: featuregate.Alpha},
	SizeMemoryBackedVolumes:           {Default: true, PreRelease: featuregate.Beta},
	ExecProbeTimeout:                  {Default: true, PreRelease: featuregate.GA}, // lock to default and remove after v1.22 based on KEP #1972 update
	KubeletCredentialProviders:        {Default: false, PreRelease: featuregate.Alpha},
	GracefulNodeShutdown:              {Default: true, PreRelease: featuregate.Beta},
	ServiceLBNodePortControl:          {Default: true, PreRelease: featuregate.Beta},
	MixedProtocolLBService:            {Default: false, PreRelease: featuregate.Alpha},
	VolumeCapacityPriority:            {Default: false, PreRelease: featuregate.Alpha},
	PreferNominatedNode:               {Default: true, PreRelease: featuregate.Beta},
	ProbeTerminationGracePeriod:       {Default: false, PreRelease: featuregate.Alpha},
	NodeSwapEnabled:                   {Default: false, PreRelease: featuregate.Alpha},
	PodDeletionCost:                   {Default: true, PreRelease: featuregate.Beta},
	StatefulSetAutoDeletePVC:          {Default: false, PreRelease: featuregate.Alpha},
	TopologyAwareHints:                {Default: false, PreRelease: featuregate.Alpha},
	PodAffinityNamespaceSelector:      {Default: true, PreRelease: featuregate.Beta},
	ServiceLoadBalancerClass:          {Default: true, PreRelease: featuregate.Beta},
	IngressClassNamespacedParams:      {Default: true, PreRelease: featuregate.Beta},
	ServiceInternalTrafficPolicy:      {Default: true, PreRelease: featuregate.Beta},
	LogarithmicScaleDown:              {Default: true, PreRelease: featuregate.Beta},
	SuspendJob:                        {Default: true, PreRelease: featuregate.Beta},
	KubeletPodResourcesGetAllocatable: {Default: false, PreRelease: featuregate.Alpha},
	NamespaceDefaultLabelName:         {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.24
	CSIVolumeHealth:                   {Default: false, PreRelease: featuregate.Alpha},
	WindowsHostProcessContainers:      {Default: false, PreRelease: featuregate.Alpha},
	DisableCloudProviders:             {Default: false, PreRelease: featuregate.Alpha},
	StatefulSetMinReadySeconds:        {Default: false, PreRelease: featuregate.Alpha},
	ExpandedDNSConfig:                 {Default: false, PreRelease: featuregate.Alpha},
	SeccompDefault:                    {Default: false, PreRelease: featuregate.Alpha},
	PodSecurity:                       {Default: false, PreRelease: featuregate.Alpha},
	ReadWriteOncePod:                  {Default: false, PreRelease: featuregate.Alpha},
	CSRDuration:                       {Default: true, PreRelease: featuregate.Beta},
	DelegateFSGroupToCSIDriver:        {Default: false, PreRelease: featuregate.Alpha},
	KubeletInUserNamespace:            {Default: false, PreRelease: featuregate.Alpha},
	// inherited features from generic apiserver, relisted here to get a conflict if it is changed
	// unintentionally on either side:
	genericfeatures.StreamingProxyRedirects: {Default: false, PreRelease: featuregate.Deprecated}, // remove in 1.24
	genericfeatures.ValidateProxyRedirects:  {Default: true, PreRelease: featuregate.Deprecated},
	genericfeatures.AdvancedAuditing:        {Default: true, PreRelease: featuregate.GA},
	genericfeatures.APIResponseCompression:  {Default: true, PreRelease: featuregate.Beta},
	genericfeatures.APIListChunking:         {Default: true, PreRelease: featuregate.Beta},
	genericfeatures.DryRun:                  {Default: true, PreRelease: featuregate.GA},
	genericfeatures.ServerSideApply:         {Default: true, PreRelease: featuregate.GA},
	genericfeatures.APIPriorityAndFairness:  {Default: true, PreRelease: featuregate.Beta},
	genericfeatures.WarningHeaders:          {Default: true, PreRelease: featuregate.GA, LockToDefault: true}, // remove in 1.24
	// features that enable backwards compatibility but are scheduled to be removed
	// ...
	HPAScaleToZero: {Default: false, PreRelease: featuregate.Alpha},
}
| {'content_hash': '0fd7988211548de8544fe4bc47110fbe', 'timestamp': '', 'source': 'github', 'line_count': 903, 'max_line_length': 161, 'avg_line_length': 40.74640088593577, 'alnum_prop': 0.6992716203728869, 'repo_name': 'idvoretskyi/kubernetes', 'id': '6a9e3c4313d568a253a90d8e60ac22e9c119c2ca', 'size': '37363', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'pkg/features/kube_features.go', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '2840'}, {'name': 'Dockerfile', 'bytes': '56641'}, {'name': 'Go', 'bytes': '43952066'}, {'name': 'HTML', 'bytes': '1199455'}, {'name': 'Lua', 'bytes': '17200'}, {'name': 'Makefile', 'bytes': '67633'}, {'name': 'Python', 'bytes': '2937989'}, {'name': 'Ruby', 'bytes': '1780'}, {'name': 'Shell', 'bytes': '1514965'}, {'name': 'sed', 'bytes': '11607'}]} |
#pragma once
#include <stdint.h>
#include "il2cpp-config.h"
#include "object-internals.h"
struct Il2CppObject;
namespace il2cpp
{
namespace icalls
{
namespace mscorlib
{
namespace System
{
namespace Runtime
{
namespace InteropServices
{
// Declarations for the native internal-call (icall) implementations backing
// the managed System.Runtime.InteropServices.GCHandle API. The int32_t
// `handle` parameters carry the opaque handle value held by the managed
// GCHandle struct; the method bodies live elsewhere in the runtime.
class LIBIL2CPP_CODEGEN_API GCHandle
{
public:
	// Icall backing GCHandle.CheckCurrentDomain(handle).
	static bool CheckCurrentDomain (int32_t handle);
	// Icall backing GCHandle.FreeHandle(handle).
	static void FreeHandle (int32_t handle);
	// Icall backing GCHandle.GetAddrOfPinnedObject(handle).
	static Il2CppIntPtr GetAddrOfPinnedObject (int32_t handle);
	// Icall backing GCHandle.GetTarget(handle).
	static Il2CppObject * GetTarget (int32_t handle);
	// Icall backing GCHandle.GetTargetHandle(obj, handle, type).
	static int32_t GetTargetHandle (Il2CppObject * obj, int32_t handle, int32_t type);
};
} /* namespace InteropServices */
} /* namespace Runtime */
} /* namespace System */
} /* namespace mscorlib */
} /* namespace icalls */
} /* namespace il2cpp */
| {'content_hash': '8947bdb143fa766a118db012f8fa6e99', 'timestamp': '', 'source': 'github', 'line_count': 37, 'max_line_length': 83, 'avg_line_length': 20.054054054054053, 'alnum_prop': 0.7452830188679245, 'repo_name': 'WestlakeAPC/unity-game', 'id': 'ee32580fc115fe0089f87c71f681dc636a091ddb', 'size': '742', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'Xcode Project/Classes/Libraries/libil2cpp/include/icalls/mscorlib/System.Runtime.InteropServices/GCHandle.h', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '2203895'}, {'name': 'C++', 'bytes': '30788244'}, {'name': 'Objective-C', 'bytes': '60881'}, {'name': 'Objective-C++', 'bytes': '297711'}, {'name': 'Shell', 'bytes': '1736'}]} |
#ifndef __ANALLOC2_BITMAP_HPP__
#define __ANALLOC2_BITMAP_HPP__
#include "../abstract/offset-aligner.hpp"
#include "raw-bitmap.hpp"
namespace analloc {
/**
 * An offset aligner which runs in O(n) time for all operations.
 *
 * Free/used state is tracked one bit per allocatable cell in the underlying
 * [RawBitmap]; a set bit means "allocated". All searches are linear scans,
 * accelerated by skipping a whole bitmap [Unit] at a time where possible.
 */
template <typename Unit, typename AddressType, typename SizeType = AddressType>
class Bitmap
    : protected RawBitmap<Unit, SizeType>,
      public virtual OffsetAligner<AddressType, SizeType> {
public:
  typedef RawBitmap<Unit, SizeType> super;
  /**
   * Create a new [Bitmap] given a region of memory [ptr] and a bit count [bc].
   *
   * All [bc] bits are cleared (marked free) on construction.
   */
  Bitmap(Unit * ptr, SizeType bc) : super(ptr, bc) {
    // Zero the buffer as efficiently as possible without overwriting any bits
    // that this bitmap doesn't own
    SizeType fullUnits = bc / this->UnitBitCount;
    for (SizeType i = 0; i < fullUnits; ++i) {
      ptr[(size_t)i] = 0;
    }
    // Clear the remaining bits of the final, partially-owned unit one by one.
    for (SizeType i = fullUnits * this->UnitBitCount; i < bc; ++i) {
      this->SetBit(i, 0);
    }
  }
  /**
   * Find [size] contiguous free bits, mark them used, and return their start
   * index through [addressOut]. Returns false when no such region exists.
   */
  virtual bool Alloc(AddressType & addressOut, SizeType size) {
    // Special cases
    if (size > this->GetBitCount()) {
      return false;
    } else if (!size) {
      // Zero-sized allocations trivially succeed at address 0.
      addressOut = 0;
      return true;
    }
    // Keep attempting to find free regions
    SizeType index = 0;
    while (NextFree(index, size - 1)) {
      // [index] is free; try to claim the following size - 1 bits too. On
      // failure, Reserve() stores the first conflicting bit in [index] so the
      // next NextFree() call resumes from the conflict.
      if (Reserve(index + 1, size - 1, &index)) {
        this->SetBit(index, true);
        addressOut = (AddressType)index;
        return true;
      }
    }
    return false;
  }
  /**
   * Like [Alloc], but the returned start index additionally satisfies
   * (offset + index) % align == 0.
   */
  virtual bool OffsetAlign(AddressType & addressOut, AddressType align,
                           AddressType offset, SizeType size) {
    if (size > this->GetBitCount()) {
      return false;
    } else if (align < 2 || !size) {
      // Alignments of 0 and 1 impose no constraint; use a plain allocation.
      return this->Alloc(addressOut, size);
    }
    SizeType index = 0;
    while (NextFreeAligned(index, offset, align, size - 1)) {
      if (this->Reserve(index + 1, size - 1, &index)) {
        this->SetBit(index, true);
        addressOut = (AddressType)index;
        return true;
      }
    }
    return false;
  }
  /**
   * Mark the [size] bits starting at [address] as free again. The caller is
   * expected to pass a region previously returned by Alloc()/OffsetAlign().
   */
  virtual void Dealloc(AddressType address, SizeType size) {
    assert((SizeType)address == address);
    assert(!ansa::AddWraps<SizeType>((SizeType)address, size));
    assert((SizeType)address + size <= this->GetBitCount());
    for (SizeType i = (SizeType)address; i < (SizeType)address + size; ++i) {
      this->SetBit(i, false);
    }
  }
  using super::GetBitCount;
protected:
  /**
   * Advance [idx] to the next free bit that still has at least [afterSize]
   * bits after it inside the bitmap. Returns false when no candidate remains.
   */
  inline bool NextFree(SizeType & idx, SizeType afterSize) {
    assert(afterSize <= this->GetBitCount());
    SizeType i = idx;
    while (i < this->GetBitCount() - afterSize) {
      if (!this->GetBit(i)) {
        idx = i;
        return true;
      } else if (i % this->UnitBitCount) {
        ++i;
      } else {
        // BitScanRight will allow us to quickly process the value
        // (~unit has its lowest set bit at the first free cell of the unit).
        // NOTE(review): when the unit is fully used, ~unit == 0 — confirm
        // ansa::BitScanRight's behavior for a zero argument.
        Unit unit = this->UnitAt(i / this->UnitBitCount);
        unsigned int add = (unsigned int)ansa::BitScanRight<Unit>(~unit);
        if ((SizeType)add != add || ansa::AddWraps<SizeType>(i, add)) {
          return false;
        }
        i += add;
      }
    }
    return false;
  }
  /**
   * Like [NextFree], but the returned index also satisfies
   * (offset + idx) % align == 0.
   */
  inline bool NextFreeAligned(SizeType & idx, AddressType offset,
                              AddressType align, SizeType afterSize) {
    assert(afterSize <= this->GetBitCount());
    SizeType i = idx;
    if (!AlignIndex(i, align, offset)) {
      return false;
    }
    // If the alignment is bigger than [SizeType]'s max, the first attempt is
    // the only possible one
    if ((SizeType)align != align) {
      if (!this->GetBit(i)) {
        idx = i;
        return true;
      } else {
        return false;
      }
    }
    // If [UnitBitCount] is larger than [align], skipping using BitScanRight is
    // worth it.
    bool jumpByUnit = this->UnitBitCount > align;
    while (i < this->GetBitCount() - afterSize) {
      // Skip to the next aligned region
      if (!this->GetBit(i)) {
        idx = i;
        return true;
      } else if (jumpByUnit && !(i % this->UnitBitCount)) {
        // BitScanRight will allow us to quickly skip the unit
        Unit unit = this->UnitAt(i / this->UnitBitCount);
        unsigned int add = (unsigned int)ansa::BitScanRight<Unit>(~unit);
        assert((SizeType)add == add);
        if (ansa::AddWraps<SizeType>(add, i)) {
          return false;
        }
        i += add;
        // The skip may have landed off an alignment boundary; realign.
        if (!AlignIndex(i, align, offset)) {
          return false;
        }
      } else {
        // Add the entire alignment
        if (ansa::AddWraps<SizeType>(i, (SizeType)align)) {
          return false;
        }
        i += (SizeType)align;
      }
    }
    return false;
  }
  /**
   * Verify that bits [idx, idx + size) are all free and, if so, mark them all
   * used. On failure nothing is modified and, when [firstUsed] is non-null,
   * it receives the index of the first used bit that blocked the reservation.
   */
  inline bool Reserve(SizeType idx, SizeType size, SizeType * firstUsed) {
    assert(!ansa::AddWraps<SizeType>(idx, size));
    assert(idx + size <= this->GetBitCount());
    // Make sure that the next [size] cells are free.
    for (SizeType i = 0; i < size; ++i) {
      if (this->GetBit(idx + i)) {
        if (firstUsed) {
          (*firstUsed) = idx + i;
        }
        return false;
      }
    }
    // Reserve all the cells
    for (SizeType i = 0; i < size; ++i) {
      this->SetBit(idx + i, true);
    }
    return true;
  }
  /**
   * Set an index to the next aligned index.
   *
   * Advances [i] (if necessary) so that (offset + i) % align == 0. Returns
   * false when the required advance cannot be represented in [SizeType] or
   * would overflow [i].
   */
  inline bool AlignIndex(SizeType & i, AddressType align, AddressType offset) {
    AddressType misalignment = (offset + i) % align;
    if (misalignment) {
      AddressType add = align - misalignment;
      if ((SizeType)add != add ||
          ansa::AddWraps<SizeType>(i, (SizeType)add)) {
        return false;
      }
      i += add;
    }
    return true;
  }
};
}
#endif
| {'content_hash': '9a018efbfabd003fce3f72d30e523c89', 'timestamp': '', 'source': 'github', 'line_count': 196, 'max_line_length': 79, 'avg_line_length': 28.775510204081634, 'alnum_prop': 0.574290780141844, 'repo_name': 'unixpickle/analloc2', 'id': '707516dde44aff351467c1b1ee69460077810129', 'size': '5640', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/bitmap/bitmap.hpp', 'mode': '33188', 'license': 'bsd-2-clause', 'language': [{'name': 'C++', 'bytes': '274112'}, {'name': 'Dart', 'bytes': '10995'}]} |
<?php
namespace Symfony\Component\ClassLoader;
@trigger_error('The '.__NAMESPACE__.'\XcacheClassLoader class is deprecated since version 3.3 and will be removed in 4.0. Use `composer install --apcu-autoloader` instead.', E_USER_DEPRECATED);
/**
* XcacheClassLoader implements a wrapping autoloader cached in XCache for PHP 5.3.
*
* It expects an object implementing a findFile method to find the file. This
* allows using it as a wrapper around the other loaders of the component (the
* ClassLoader for instance) but also around any other autoloaders following
* this convention (the Composer one for instance).
*
* // with a Symfony autoloader
* $loader = new ClassLoader();
* $loader->addPrefix('Symfony\Component', __DIR__.'/component');
* $loader->addPrefix('Symfony', __DIR__.'/framework');
*
* // or with a Composer autoloader
* use Composer\Autoload\ClassLoader;
*
* $loader = new ClassLoader();
* $loader->add('Symfony\Component', __DIR__.'/component');
* $loader->add('Symfony', __DIR__.'/framework');
*
* $cachedLoader = new XcacheClassLoader('my_prefix', $loader);
*
* // activate the cached autoloader
* $cachedLoader->register();
*
* // eventually deactivate the non-cached loader if it was registered previously
* // to be sure to use the cached one.
* $loader->unregister();
*
* @author Fabien Potencier <[email protected]>
* @author Kris Wallsmith <[email protected]>
* @author Kim Hemsø Rasmussen <[email protected]>
*
* @deprecated since version 3.3, to be removed in 4.0. Use `composer install --apcu-autoloader` instead.
*/
class XcacheClassLoader
{
    private $prefix;
    private $decorated;

    /**
     * @param string $prefix    The XCache namespace prefix to use
     * @param object $decorated A class loader object that implements the findFile() method
     *
     * @throws \RuntimeException         if the XCache extension is not loaded
     * @throws \InvalidArgumentException if the decorated loader has no findFile() method
     */
    public function __construct($prefix, $decorated)
    {
        if (!extension_loaded('xcache')) {
            throw new \RuntimeException('Unable to use XcacheClassLoader as XCache is not enabled.');
        }

        if (!method_exists($decorated, 'findFile')) {
            throw new \InvalidArgumentException('The class finder must implement a "findFile" method.');
        }

        $this->prefix = $prefix;
        $this->decorated = $decorated;
    }

    /**
     * Adds this instance to the SPL autoloader stack.
     *
     * @param bool $prepend Whether to prepend the autoloader or not
     */
    public function register($prepend = false)
    {
        $callback = array($this, 'loadClass');
        spl_autoload_register($callback, true, $prepend);
    }

    /**
     * Removes this instance from the SPL autoloader stack.
     */
    public function unregister()
    {
        $callback = array($this, 'loadClass');
        spl_autoload_unregister($callback);
    }

    /**
     * Loads the given class or interface.
     *
     * @param string $class The name of the class
     *
     * @return bool|null True, if loaded
     */
    public function loadClass($class)
    {
        $file = $this->findFile($class);
        if (!$file) {
            return null;
        }

        require $file;

        return true;
    }

    /**
     * Resolves a class name to a file path, memoizing every lookup in XCache.
     *
     * @param string $class A class name to resolve to file
     *
     * @return string|null
     */
    public function findFile($class)
    {
        $key = $this->prefix.$class;
        if (xcache_isset($key)) {
            return xcache_get($key);
        }

        $file = $this->decorated->findFile($class) ?: null;
        xcache_set($key, $file);

        return $file;
    }

    /**
     * Passes through all unknown calls onto the decorated object.
     */
    public function __call($method, $args)
    {
        $callback = array($this->decorated, $method);

        return call_user_func_array($callback, $args);
    }
}
| {'content_hash': 'a9e68216eaa4d9ef4dc9edbd175b8745', 'timestamp': '', 'source': 'github', 'line_count': 130, 'max_line_length': 193, 'avg_line_length': 30.407692307692308, 'alnum_prop': 0.6164938021755628, 'repo_name': 'smoers/bird', 'id': '74dbdc85d25e5ef18bf587d5146767c4eae82e90', 'size': '4183', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'vendor/symfony/symfony/src/Symfony/Component/ClassLoader/XcacheClassLoader.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '33967'}, {'name': 'HTML', 'bytes': '128814'}, {'name': 'JavaScript', 'bytes': '106639'}, {'name': 'PHP', 'bytes': '335721'}]} |
package com.google.gapid.server;
import static com.google.gapid.util.Logging.logLevel;
import static java.util.logging.Level.INFO;
import static java.util.logging.Level.WARNING;
import com.google.common.collect.Lists;
import com.google.gapid.models.Settings;
import com.google.gapid.server.Tracer.TraceRequest;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.function.Consumer;
import java.util.logging.Logger;
/**
 * {@link ChildProcess} for the "gapit trace" command to handle capturing an API trace.
 */
public class GapitTraceProcess extends ChildProcess<Boolean> {
  private static final Logger LOG = Logger.getLogger(GapitTraceProcess.class.getName());

  // Sentinel line gapit prints once the capture is actually running.
  private static final String TRACING_MESSAGE = "Press enter to stop capturing...";

  private final TraceRequest request;
  private final Consumer<String> onOutput;

  public GapitTraceProcess(Settings settings, TraceRequest request, Consumer<String> onOutput) {
    super("gapit", settings);
    this.request = request;
    this.onOutput = onOutput;
  }

  @Override
  protected Exception prepare(ProcessBuilder pb) {
    File tool = GapiPaths.gapit();
    if (tool == null || !tool.exists()) {
      LOG.log(WARNING, "Could not find gapit for tracing.");
      return new Exception("Could not find the gapit executable.");
    }

    List<String> command = Lists.newArrayList();
    command.add(tool.getAbsolutePath());
    command.add("-log-level");
    command.add(logLevel.get().gapisLevel);
    command.add("trace");

    String adbPath = GapiPaths.adb(settings);
    if (!adbPath.isEmpty()) {
      command.add("--adb");
      command.add(adbPath);
    }

    request.appendCommandLine(command);
    pb.command(command);
    return null;
  }

  @Override
  protected OutputHandler<Boolean> createStdoutHandler() {
    // Forward stdout to the UI, watching for the sentinel that marks the
    // moment the capture has started.
    return new LoggingStringHandler<Boolean>(LOG, name, false, line -> {
      if (!TRACING_MESSAGE.equals(line)) {
        onOutput.accept(line);
        return null;
      }
      onOutput.accept("Tracing...");
      return Boolean.TRUE;
    });
  }

  @Override
  protected OutputHandler<Boolean> createStderrHandler() {
    // Stderr lines are simply surfaced to the UI.
    return new LoggingStringHandler<Boolean>(LOG, name, true, line -> {
      onOutput.accept(line);
      return null;
    });
  }

  /**
   * Only required for mid execution capture. Since the (current) mechanism used to start the
   * trace is the same as the one used to end it, this method should never be called for non
   * mid execution captures.
   */
  public void startTracing() {
    if (!isRunning()) {
      return;
    }
    LOG.log(INFO, "Attempting to start the trace.");
    sendEnter();
  }

  public void stopTracing() {
    if (!isRunning()) {
      return;
    }
    LOG.log(INFO, "Attempting to end the trace.");
    sendEnter();
  }

  // Writes a newline to the child's stdin (gapit's start/stop signal) and
  // shuts the process down if the pipe is broken.
  private void sendEnter() {
    try {
      OutputStream stdin = process.getOutputStream();
      stdin.write('\n');
      stdin.flush();
    } catch (IOException e) {
      LOG.log(WARNING, "Failed to send the 'enter' command to the trace", e);
      shutdown();
    }
  }
}
| {'content_hash': '4d4800df8773c32bed509a31fef4b355', 'timestamp': '', 'source': 'github', 'line_count': 113, 'max_line_length': 99, 'avg_line_length': 26.858407079646017, 'alnum_prop': 0.6738056013179572, 'repo_name': 'ek9852/gapid', 'id': 'd43526001fb8f8326961b20afe657a404dfd0b4f', 'size': '3634', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'gapic/src/main/com/google/gapid/server/GapitTraceProcess.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '7873'}, {'name': 'C', 'bytes': '227802'}, {'name': 'C++', 'bytes': '1158948'}, {'name': 'CMake', 'bytes': '402027'}, {'name': 'GLSL', 'bytes': '24812'}, {'name': 'Go', 'bytes': '4685920'}, {'name': 'HTML', 'bytes': '128711'}, {'name': 'Java', 'bytes': '1008429'}, {'name': 'JavaScript', 'bytes': '13177'}, {'name': 'Objective-C++', 'bytes': '12872'}, {'name': 'Protocol Buffer', 'bytes': '127967'}, {'name': 'Python', 'bytes': '917'}, {'name': 'Shell', 'bytes': '13371'}]} |
package tripbook.main;
import tripbook.main.AndroidMultiPartEntity.ProgressListener;
import java.io.File;
import java.io.IOException;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.VideoView;
public class UploadActivity extends Activity {
    // LogCat tag (borrows AddPost's simple name so related log lines group together)
    private static final String TAG = AddPost.class.getSimpleName();
    private ProgressBar progressBar;
    // Absolute path of the captured media, passed in via the launch Intent ("filePath" extra).
    private String filePath = null;
    private TextView txtPercentage;
    private ImageView imgPreview;
    private Button btnUpload;
    // Total byte length of the multipart request; the upload progress callback divides by it.
    long totalSize = 0;
    private EditText etplace;
    private EditText etexp;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_upload);

    // Bind views.
    txtPercentage = (TextView) findViewById(R.id.txtPercentage);
    btnUpload = (Button) findViewById(R.id.btnUpload);
    progressBar = (ProgressBar) findViewById(R.id.progressBar);
    imgPreview = (ImageView) findViewById(R.id.imgPreview);
    etplace = (EditText) findViewById(R.id.etplace);
    etexp = (EditText) findViewById(R.id.etExp);

    // Media details handed over by the previous (capture) activity.
    Intent intent = getIntent();
    filePath = intent.getStringExtra("filePath");
    boolean isImage = intent.getBooleanExtra("isImage", true);

    if (filePath == null) {
        Toast.makeText(getApplicationContext(),
                "Sorry, file path is missing!", Toast.LENGTH_LONG).show();
    } else {
        // Display the captured image or video.
        previewMedia(isImage);
    }

    btnUpload.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Kick off the background upload.
            new UploadFileToServer().execute();
        }
    });
}
/**
 * Shows the captured media on screen. Images are decoded down-sampled (large
 * captures can exhaust memory) and displayed; for video the image preview is
 * simply hidden.
 */
private void previewMedia(boolean isImage) {
    if (!isImage) {
        imgPreview.setVisibility(View.GONE);
        return;
    }
    imgPreview.setVisibility(View.VISIBLE);
    BitmapFactory.Options decodeOptions = new BitmapFactory.Options();
    // Down-sample 8x to avoid OutOfMemory on large images.
    decodeOptions.inSampleSize = 8;
    final Bitmap preview = BitmapFactory.decodeFile(filePath, decodeOptions);
    imgPreview.setImageBitmap(preview);
}
/**
 * Background task that streams the captured file plus the form fields to
 * {@code Config.FILE_UPLOAD_URL} as a multipart POST, publishing upload
 * progress to the UI. The server's response body (or an error string) is
 * delivered to {@link UploadActivity#showAlert(String)} when done.
 */
private class UploadFileToServer extends AsyncTask<Void, Integer, String> {
    @Override
    protected void onPreExecute() {
        // Reset the progress bar before the upload starts.
        progressBar.setProgress(0);
        super.onPreExecute();
    }

    @Override
    protected void onProgressUpdate(Integer... progress) {
        // Make the progress bar visible and reflect the latest percentage.
        progressBar.setVisibility(View.VISIBLE);
        progressBar.setProgress(progress[0]);
        txtPercentage.setText(String.valueOf(progress[0]) + "%");
    }

    @Override
    protected String doInBackground(Void... params) {
        return uploadFile();
    }

    /**
     * Performs the blocking multipart upload.
     *
     * @return the server response body on HTTP 200, an error description on a
     *         non-200 status, or the exception string on a transport failure
     */
    @SuppressWarnings("deprecation")
    private String uploadFile() {
        String responseString = null;
        HttpClient httpclient = new DefaultHttpClient();
        HttpPost httppost = new HttpPost(Config.FILE_UPLOAD_URL);
        try {
            // Entity that reports bytes transferred so progress can be published.
            AndroidMultiPartEntity entity = new AndroidMultiPartEntity(
                    new ProgressListener() {
                        @Override
                        public void transferred(long num) {
                            publishProgress((int) ((num / (float) totalSize) * 100));
                        }
                    });
            File sourceFile = new File(filePath);
            // File part followed by the extra form fields the server expects.
            entity.addPart("InputFile", new FileBody(sourceFile));
            entity.addPart("sessionuser",
                    new StringBody(getDefaults("namestore", getApplicationContext()).toString()));
            entity.addPart("InputName",
                    new StringBody(etplace.getText().toString()));
            entity.addPart("InputMessage", new StringBody(etexp.getText().toString()));
            // totalSize must be captured after the entity is fully built and
            // before execute(): the progress callback above divides by it.
            totalSize = entity.getContentLength();
            httppost.setEntity(entity);
            // Blocking server call.
            HttpResponse response = httpclient.execute(httppost);
            HttpEntity r_entity = response.getEntity();
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode == 200) {
                // Success: hand back the raw response body.
                responseString = EntityUtils.toString(r_entity);
            } else {
                responseString = "Error occurred! Http Status Code: "
                        + statusCode;
            }
        } catch (ClientProtocolException e) {
            responseString = e.toString();
        } catch (IOException e) {
            responseString = e.toString();
        }
        return responseString;
    }

    @Override
    protected void onPostExecute(String result) {
        Log.e(TAG, "Response from server: " + result);
        // Show the server response in an alert dialog.
        showAlert(result);
        super.onPostExecute(result);
    }
}
/**
 * Shows the upload result in a modal alert dialog.
 *
 * @param message the text logged as the server response — either the response
 *                body on success or an error description on failure
 */
private void showAlert(String message) {
    AlertDialog.Builder builder = new AlertDialog.Builder(this);
    // Bug fix: this previously displayed a hard-coded "Success!!" and ignored
    // the message parameter entirely, so failures looked like successes.
    // Show the actual server response instead.
    builder.setMessage(message).setTitle("Response from Servers")
            .setCancelable(false)
            .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    // Dismiss only; nothing else to do.
                }
            });
    AlertDialog alert = builder.create();
    alert.show();
}
/**
 * Reads a string value from the app's default shared preferences.
 *
 * @param key     preference key to look up
 * @param context any context; used to resolve the default preference file
 * @return the stored value, or {@code null} if the key has never been written
 */
public static String getDefaults(String key, Context context) {
    SharedPreferences prefs =
            PreferenceManager.getDefaultSharedPreferences(context);
    return prefs.getString(key, null);
}
} | {'content_hash': 'f7111621a4f3fdf2ddea69c7955bfd35', 'timestamp': '', 'source': 'github', 'line_count': 240, 'max_line_length': 92, 'avg_line_length': 28.445833333333333, 'alnum_prop': 0.7301889556174015, 'repo_name': 'ITheBK/TripBookAndroid', 'id': 'e1e0176034adaa62776907b1a8687ab5ea3aa391', 'size': '6827', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/tripbook/main/UploadActivity.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '125192'}]} |
/**
 * Error subtype for client-side failures.
 *
 * Bug fix: the second constructor argument was previously forwarded to the
 * Error constructor (`super(msg, id)`), which discards it — `id` was lost.
 * It is now stored on the instance as `this.id`.
 */
class ClientError extends Error {
    /**
     * @param {string} msg human-readable description of the failure
     * @param {*} id identifier associated with this error
     */
    constructor(msg, id) {
        super(msg);
        this.name = 'ClientError';
        this.id = id;
    }

    // Returns the class name; kept for callers that use it instead of `name`.
    static getClassName() {
        return 'ClientError';
    }
}
module.exports = ClientError;
| {'content_hash': 'ed3ca221aed93bb269c697b5d9459a40', 'timestamp': '', 'source': 'github', 'line_count': 11, 'max_line_length': 32, 'avg_line_length': 19.454545454545453, 'alnum_prop': 0.616822429906542, 'repo_name': 'jiangysh/51z10', 'id': '9ca64075ab2fd6fb3859c78f291c9f170c8419bd', 'size': '216', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'test/ClientError.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '1106456'}, {'name': 'HTML', 'bytes': '653858'}, {'name': 'JavaScript', 'bytes': '4319691'}]} |
## Copyright (c) 2014 Citrix Systems, Inc. All Rights Reserved.
## You may only reproduce, distribute, perform, display, or prepare derivative works of this file pursuant to a valid license from Citrix.
from lib.hyperv import HyperV # the class
from lib.hyperv import hyperv # the connection
import lib.cloudstack
from lib.cloudstack import cs
import json
import hashlib
import ntpath
import os
import pprint
import sys
import time
import re
import traceback
import lib.config_manager
from lib.config_manager import ConfigManager
import common_services
from common_services import CommonServices
class HypverMigrator:
def __init__(self, confMgr):
    """Build a Hyper-V migrator.

    confMgr may be None, in which case a ConfigManager is created from
    ./settings-hyperv.conf with ./running-hyperv.conf as the persistent
    store, seeded with the defaults below. CloudStack settings are merged
    into the same store so everything persists in one place.
    """
    HYPERVISOR_TYPE = 'hyperv'
    # Defaults used only when the caller does not supply a ConfigManager.
    defaultHypervConfig = [
        ('HYPERVISOR', 'hypervisorType', HYPERVISOR_TYPE),
        ('HYPERVISOR', 'migration_input_file', './input/migrate_hyperv_input.json'),
        ('HYPERVISOR', 'pscp_exe', 'C:\pscp.exe'),
        ('HYPERVISOR', 'log_file', './logs/hyperv_api.log'),
        ('STATE', 'active_migration', 'False')
    ]
    self.confMgr = confMgr
    if not self.confMgr:
        configFile = './settings-' + HYPERVISOR_TYPE + '.conf'
        persistentStore = './running-' + HYPERVISOR_TYPE + '.conf'
        self.confMgr = ConfigManager(configFile, persistentStore, defaultHypervConfig)
    # Put all the running configs in the same persistent store.
    self.confMgr.addOptionsToSection('CLOUDSTACK', lib.cloudstack.getCloudStackConfig())
    self.log = common_services.createMigrationLog(self.confMgr)
    self.commonService = CommonServices(self.confMgr)
def updateVms(self, vms):
self.confMgr.updateOptions([('STATE', 'vms', vms)], True)
def get_vm_raw_from_src(self, vm_in):
    """Look up the raw Get-VM object for one VM on its Hyper-V server.

    vm_in must carry 'hyperv_vm_name' and 'hyperv_server'; returns the first
    matching object, or None when the fields are missing or the lookup fails.
    """
    if 'hyperv_vm_name' not in vm_in or 'hyperv_server' not in vm_in:
        return None
    objs, ok = hyperv.powershell(
        'Get-VM -Name "%s" -Server "%s"' % (vm_in['hyperv_vm_name'], vm_in['hyperv_server']))
    if ok and objs:
        return objs[0]
    return None
def get_vm_info(self, vm_id, vm_in, vm_raw):
    """Collect CPU, memory, power-state and disk details for one Hyper-V VM.

    vm_id: stable sha1 id for the VM (derived from server|name).
    vm_in: dict with at least 'hyperv_vm_name' and 'hyperv_server'; it is
        mutated in place and returned as the result record.
    vm_raw: raw Get-VM object if the caller already has one (discovery path),
        otherwise None and it is looked up here (input-file path).

    Returns the populated record, or None when the VM cannot be found
    (callers that .update() the result should guard against None).
    """
    if not vm_raw:  # None when reading an input file instead of discovering
        # Bug fix: this was a bare get_vm_raw_from_src() call, which raised
        # NameError because the helper is an instance method.
        vm_raw = self.get_vm_raw_from_src(vm_in)
    if vm_raw:  # make sure it found the specified VM
        self.log.info('\nGETTING VM INFO %s\n%s' % (vm_in['hyperv_vm_name'], '----------'+'-'*len(vm_in['hyperv_vm_name'])))
        vm_out = vm_in
        vm_out['id'] = vm_id
        vm_out['src_name'] = vm_raw['ElementName']
        vm_out['src_type'] = vm_raw['ElementName']
        # Cores * sockets -> total vCPU count.
        cpu, ok = hyperv.powershell('Get-VMCPUCount -VM "%s" -Server "%s"' % (vm_in['hyperv_vm_name'], vm_in['hyperv_server']))
        if ok:
            vm_out['src_cpus'] = int(cpu[0]['ProcessorsPerSocket']) * int(cpu[0]['SocketCount'])
        else:
            self.handleError('Get-VMCPUCount powershell command failed on %s' % (vm_in['hyperv_vm_name']))
            self.handleError('ERROR: Check the "%s" log for details' % (self.confMgr.get('HYPERVISOR', 'log_file')))
        # Memory reservation, in whatever unit Get-VMMemory reports.
        memory, ok = hyperv.powershell('Get-VMMemory -VM "%s" -Server "%s"' % (vm_in['hyperv_vm_name'], vm_in['hyperv_server']))
        if ok:
            vm_out['src_memory'] = int(memory[0]['Reservation'])
        else:
            self.handleError('Get-VMMemory powershell command failed on %s' % (vm_in['hyperv_vm_name']))
            self.handleError('ERROR: Check the "%s" log for details' % (self.confMgr.get('HYPERVISOR', 'log_file')))
        # Record the VM's current power state.
        if int(vm_raw['EnabledState']) == HyperV.VM_RUNNING:
            vm_out['state'] = 'running'
            self.log.info('VM %s is Running' % (vm_in['hyperv_vm_name']))
        elif int(vm_raw['EnabledState']) == HyperV.VM_STOPPED:
            vm_out['state'] = 'stopped'
            self.log.info('VM %s is Stopped' % (vm_in['hyperv_vm_name']))
        else:  # any other Hyper-V state is treated as unknown (and an error)
            vm_out['state'] = 'unknown'
            self.handleError('VM %s is in an Unknown state' % (vm_in['hyperv_vm_name']))
        # Enumerate hard drives; 'ok' here is still the Get-VMMemory result,
        # so a running VM with a failed memory query skips disk discovery.
        if (vm_out['state'] == 'running' and ok) or vm_out['state'] == 'stopped':
            disks, ok = hyperv.powershell('Get-VMDisk -VM "%s"' % (vm_in['hyperv_vm_name']))
            if ok:
                vm_out['src_disks'] = []
                for disk in disks:
                    if 'DriveName' in disk and disk['DriveName'] == 'Hard Drive' and 'DiskImage' in disk:
                        vm_out['src_disks'].append({
                            'size': '0',
                            'label': disk['DriveName'],
                            'path': disk['DiskImage'],  # the src path
                            'name': ntpath.split(disk['DiskImage'])[1].replace(' ', '-').split('.')[0],
                            # Download URL the file will have once copied to the file server.
                            'url': '%s://%s:%s%s%s' % (
                                'https' if self.confMgr.get('FILESERVER', 'port') == '443' else 'http',
                                self.confMgr.get('FILESERVER', 'host'),
                                self.confMgr.get('FILESERVER', 'port'),
                                self.confMgr.get('FILESERVER', 'base_uri'),
                                ntpath.split(disk['DiskImage'])[1].replace(' ', '-')
                            )
                        })
            else:
                self.handleError('Get-VMDisk powershell command failed on %s' % (vm_in['hyperv_vm_name']))
                self.handleError('ERROR: Check the "%s" log for details' % (self.confMgr.get('HYPERVISOR', 'log_file')))
        vm_out['migrationState'] = ''
        return vm_out
def discover_vms(self):
    """Query the Hyper-V host for all VMs and refresh [STATE] vms/vm_order.

    Returns the (vms, order) pair that was persisted. Entries no longer
    reported by the host are dropped from both structures.
    """
    self.confMgr.refresh()
    if self.confMgr.has_option('STATE', 'vms'):
        # initialize the 'vms' variable from the existing config...
        vms = json.loads(self.confMgr.get('STATE', 'vms'))
    else:
        vms = {}
    if self.confMgr.has_option('STATE', 'vm_order'):
        order = json.loads(self.confMgr.get('STATE', 'vm_order'))
    else:
        order = []
    with open(self.confMgr.get('HYPERVISOR', 'log_file'), 'a') as f:
        f.write('\n\nDISCOVERING HYPERV...\n')
    discovered = []  # vm ids seen in this pass; anything absent is pruned from 'vms' below
    vm_input = {}
    self.log.info('\n-----------------------\n-- discovering vms... --\n-----------------------')
    # collect data about the VMs from HyperV and populate a list of VMs
    # NOTE(review): the host name is hard-coded here — presumably it should
    # come from configuration; confirm before pointing at another server.
    HypervHost = 'HYPERV1'
    objs, ok = hyperv.powershell('Get-VM -Server "%s" ' % (HypervHost))
    if objs and ok:
        for hypervObj in objs:  # one raw Get-VM object per VM on the host
            vm_in = {}
            self.log.info(hypervObj)
            vm_in['hyperv_server'] = HypervHost
            vm_in['hyperv_vm_name'] = hypervObj['ElementName']
            # Stable id from server+name so rediscovery maps to the same entry.
            vm_id = hashlib.sha1(vm_in['hyperv_server']+"|"+vm_in['hyperv_vm_name']).hexdigest()
            self.log.info("............vm_id is %s" % vm_id)
            if vm_id not in order:
                self.log.info("............vm_id %s not in order %s" % (vm_id, order))
                order.append(vm_id)
            if vm_id not in vms:
                self.log.info("............vm_id %s not in vms %s" % (vm_id, vms))
                vms[vm_id] = {}
            vms[vm_id].update(self.get_vm_info(vm_id, vm_in, hypervObj))
            discovered.append(vm_id)
    # Prune any previously-known VMs that were not seen in this pass.
    # NOTE(review): deleting while iterating vms.keys() is only safe on
    # Python 2 (where keys() returns a list) — confirm target interpreter.
    for vm_id in vms.keys():
        if vm_id not in discovered:
            del vms[vm_id]  # no longer a valid VM, so remove it...
            if vm_id in order:  # remove the vm from the order list as well if it exists...
                order.remove(vm_id)
    ### Update the running-hyperv.conf file
    self.confMgr.updateOptions([('STATE', 'vms', vms), ('STATE', 'vm_order', order)], True)
    self.confMgr.updateRunningConfig()
    return vms, order
def discover_vms_from_input_files(self):
    """Populate [STATE] vms/vm_order from the JSON migration input file.

    Same contract as discover_vms(), but the VM list comes from
    [HYPERVISOR] migration_input_file instead of a live host query.
    Exits the process if the input file is not valid JSON.
    """
    self.confMgr.refresh()
    if self.confMgr.has_option('STATE', 'vms'):
        # initialize the 'vms' variable from the existing config...
        vms = json.loads(self.confMgr.get('STATE', 'vms'))
    else:
        vms = {}
    if self.confMgr.has_option('STATE', 'vm_order'):
        order = json.loads(self.confMgr.get('STATE', 'vm_order'))
    else:
        order = []
    with open(self.confMgr.get('HYPERVISOR', 'log_file'), 'a') as f:
        f.write('\n\nDISCOVERING HYPERV...\n')
    discovered = []  # vm ids seen in this pass; anything absent is pruned from 'vms' below
    vm_input = {}
    if os.path.exists(self.confMgr.get('HYPERVISOR', 'migration_input_file')):
        with open(self.confMgr.get('HYPERVISOR', 'migration_input_file'), 'r') as f:
            try:
                vm_input = json.load(f)
            except:
                self.log.info(sys.exc_info())
                sys.exit("Error in the formatting of '%s'" % (self.confMgr.get('HYPERVISOR', 'migration_input_file')))
    self.log.info('\n-----------------------\n-- discovering vms... --\n-----------------------')
    # collect data about the VMs from HyperV and populate a list of VMs
    if vm_input:  # make sure there is data in the file
        for vm_key in vm_input:  # loop through the vms in the file
            vm_in = vm_input[vm_key]
            self.log.info(vm_in)
            # Stable id from server+name so repeat runs map to the same entry.
            vm_id = hashlib.sha1(vm_in['hyperv_server']+"|"+vm_in['hyperv_vm_name']).hexdigest()
            self.log.info("............vm_id is %s" % vm_id)
            if vm_id not in order:
                self.log.info("............vm_id %s not in order %s" % (vm_id, order))
                order.append(vm_id)
            if vm_id not in vms:
                self.log.info("............vm_id %s not in vms %s" % (vm_id, vms))
                vms[vm_id] = {}
            # vm_raw=None forces a live lookup inside get_vm_info().
            vms[vm_id].update(self.get_vm_info(vm_id, vm_in, None))
            discovered.append(vm_id)
    # Prune any previously-known VMs that were not listed in this pass.
    for vm_id in vms.keys():
        if vm_id not in discovered:
            del vms[vm_id]  # no longer a valid VM, so remove it...
            if vm_id in order:  # remove the vm from the order list as well if it exists...
                order.remove(vm_id)
    ### Update the running-hyperv.conf file
    self.confMgr.updateOptions([('STATE', 'vms', vms), ('STATE', 'vm_order', order)], True)
    self.confMgr.updateRunningConfig()
    return vms, order
# copy vhd files to the file server
def copy_vhd_to_file_server(self, vhd_path, vhd_name):
    """Copy one VHD to the file server via pscp.

    Returns the (output, ok) pair produced by hyperv.powershell for the
    pscp invocation.
    """
    cfg = self.confMgr.get
    command = '%s -l %s -pw %s "%s" %s:%s/%s' % (
        cfg('HYPERVISOR', 'pscp_exe'),
        cfg('FILESERVER', 'username'),
        cfg('FILESERVER', 'password'),
        vhd_path,
        cfg('FILESERVER', 'host'),
        cfg('FILESERVER', 'files_path'),
        vhd_name,
    )
    return hyperv.powershell(command)
def export_vm(self, vm_id):
    """Copy the VM's hard-drive VHDs to the file server ('exported' stage).

    Skips everything when a previous step already flagged migrate_error.
    On success the VM's migrationState becomes 'exported' and is persisted.
    NOTE(review): no explicit Stop-VM is issued before the copy even for
    running VMs, yet running VMs are restarted afterwards — confirm the
    stop happens elsewhere or that copying live disks is intended.
    """
    self.log.info('\n-----------------------\n-- RUNNING VM EXPORT --\n-----------------------')
    self.confMgr.refresh()
    if not self.confMgr.getboolean('STATE', 'migrate_error'):
        # initialize the 'vms' variable from the existing config...
        vms = json.loads(self.confMgr.get('STATE', 'vms'))
        self.log.info('EXPORTING %s' % (vms[vm_id]['src_name']))
        # Sanitized name used later for CloudStack objects.
        vms[vm_id]['clean_name'] = re.sub('[^0-9a-zA-Z]+', '-', vms[vm_id]['src_name']).strip('-')
        # Only VMs in a known power state are processed.
        if vms[vm_id]['state'] == 'running' or vms[vm_id]['state'] == 'stopped':
            exported = False
            for disk in vms[vm_id]['src_disks']:
                if 'label' in disk and disk['label'] == 'Hard Drive' and 'path' in disk:
                    self.log.info('Copying drive %s' % (disk['path']))
                    result, ok = self.copy_vhd_to_file_server(disk['path'], ntpath.split(disk['path'])[1].replace(' ', '-'))
                    if ok:
                        self.log.info('Finished copy...')
                        exported = True
                    else:
                        self.handleError('Copy failed...')
                        self.handleError('ERROR: Check the "%s" log for details' % (self.confMgr.get('HYPERVISOR', 'log_file')))
                else:
                    self.log.info('No label/path (DiskName/DiskImage) field or no label is not Hard Drive for disk: %s' % (disk))
            # bring the machines back up that were running now that we copied their disks
            if vms[vm_id]['state'] == 'running':
                status, ok = hyperv.powershell('Start-VM -VM "%s" -Server "%s" -Wait -Force' % (vms[vm_id]['hyperv_vm_name'], vms[vm_id]['hyperv_server']))
                if ok:
                    self.log.info('Re-Started VM %s' % (vms[vm_id]['hyperv_vm_name']))
                else:
                    self.handleError('Failed to restart the server.')
                    self.handleError('ERROR: Check the "%s" log for details' % (self.confMgr.get('HYPERVISOR', 'log_file')))
            if exported:
                self.log.info('Finished exporting %s' % (vms[vm_id]['hyperv_vm_name']))
                ### Update the running-hyperv.conf file
                self.confMgr.refresh()
                vms[vm_id]['migrationState'] = 'exported'
                self.updateVms(vms)
                self.confMgr.updateRunningConfig()
                self.log.info("\nCurrent VM Objects:")
                self.log.info(vms[vm_id])
def import_vm(self, vm_id):
    """Register the exported disks in CloudStack ('imported' stage).

    The first disk becomes a template (root volume); any remaining disks
    are uploaded as data volumes. Missing CloudStack settings on the VM
    record are filled from the [CLOUDSTACK] defaults. On success the VM's
    migrationState becomes 'imported' and is persisted.
    """
    self.log.info('\n\n-----------------------\n-- RUNNING VM IMPORT --\n-----------------------')
    vms = json.loads(self.confMgr.get('STATE', 'vms'))
    self.log.info("migrationState is: %s" % (vms[vm_id]['migrationState']))
    if vms[vm_id]['migrationState'] == 'exported':
        self.log.info('IMPORTING %s\n%s' % (vms[vm_id]['hyperv_vm_name'], '----------'+'-'*len(vms[vm_id]['hyperv_vm_name'])))
        imported = False
        ## Fill in CloudStack details we know (or have configured defaults for).
        # NOTE(review): unlike the other fields, cs_zone is never defaulted
        # here (the code that did so is commented out below) — it must come
        # from the VM record, and cs_ip_address is never defaulted either.
        # if 'cs_zone' not in vms[vm_id] and self.confMgr.has_option('CLOUDSTACK', 'default_zone'):
        #     vms[vm_id]['cs_zone'] = self.confMgr.get('CLOUDSTACK', 'default_zone')
        #     zone = cs.request(dict({'command':'listZones', 'id':vms[vm_id]['cs_zone']}))
        #     if zone and 'zone' in zone and len(zone['zone']) > 0:
        #         if zone['zone'][0]['networktype'] == 'Basic':
        #             vms[vm_id]['cs_zone_network'] = 'basic'
        #         else:
        #             vms[vm_id]['cs_zone_network'] = 'advanced'
        if 'cs_domain' not in vms[vm_id] and self.confMgr.has_option('CLOUDSTACK', 'default_domain'):
            vms[vm_id]['cs_domain'] = self.confMgr.get('CLOUDSTACK', 'default_domain')
        if 'cs_account' not in vms[vm_id] and self.confMgr.has_option('CLOUDSTACK', 'default_account'):
            vms[vm_id]['cs_account'] = self.confMgr.get('CLOUDSTACK', 'default_account')
        if 'cs_network' not in vms[vm_id] and self.confMgr.has_option('CLOUDSTACK', 'default_network'):
            vms[vm_id]['cs_network'] = self.confMgr.get('CLOUDSTACK', 'default_network')
        if 'cs_service_offering' not in vms[vm_id] and self.confMgr.has_option('CLOUDSTACK', 'default_service_offering'):
            vms[vm_id]['cs_service_offering'] = self.confMgr.get('CLOUDSTACK', 'default_service_offering')
        # make sure we have a complete config before we start
        if ('cs_zone' in vms[vm_id] and 'cs_domain' in vms[vm_id] and 'cs_account' in vms[vm_id] and 'cs_network' in vms[vm_id] and 'cs_service_offering' in vms[vm_id]):
            # manage the disks
            if 'src_disks' in vms[vm_id] and len(vms[vm_id]['src_disks']) > 0:
                # register the first disk as a template since it is the root disk
                self.log.info('Creating template for root volume \'%s\'...' % (vms[vm_id]['src_disks'][0]['name']))
                template = cs.request(dict({
                    'command':'registerTemplate',
                    'name':vms[vm_id]['src_disks'][0]['name'].replace(' ', '-'),
                    'displaytext':vms[vm_id]['src_disks'][0]['name'],
                    'format':'VHD',
                    'hypervisor':'Hyperv',
                    'ostypeid':'138', # None
                    'url':vms[vm_id]['src_disks'][0]['url'],
                    'zoneid':vms[vm_id]['cs_zone'],
                    'domainid':vms[vm_id]['cs_domain'],
                    'account':vms[vm_id]['cs_account']
                }))
                if template:
                    self.log.info('Template \'%s\' created...' % (template['template'][0]['id']))
                    vms[vm_id]['cs_template_id'] = template['template'][0]['id']
                    imported = True
                else:
                    self.handleError('ERROR: Check the "%s" log for details' % (self.confMgr.get('CLOUDSTACK', 'log_file')))
                # check if there are data src_disks
                if len(vms[vm_id]['src_disks']) > 1:
                    # upload the remaining src_disks as volumes
                    for disk in vms[vm_id]['src_disks'][1:]:
                        imported = False  # reset because we have more to do...
                        self.log.info('Uploading data volume \'%s\'...' % (disk['name']))
                        volume = cs.request(dict({
                            'command':'uploadVolume',
                            'name':disk['name'].replace(' ', '-'),
                            'format':'VHD',
                            'url':disk['url'],
                            'zoneid':vms[vm_id]['cs_zone'],
                            'domainid':vms[vm_id]['cs_domain'],
                            'account':vms[vm_id]['cs_account']
                        }))
                        if volume and 'jobresult' in volume and 'volume' in volume['jobresult']:
                            volume_id = volume['jobresult']['volume']['id']
                            self.log.info('Volume \'%s\' uploaded...' % (volume_id))
                            if 'cs_volumes' in vms[vm_id]:
                                vms[vm_id]['cs_volumes'].append(volume_id)
                            else:
                                vms[vm_id]['cs_volumes'] = [volume_id]
                            imported = True
                        else:
                            self.handleError('ERROR: Check the "%s" log for details' % (self.confMgr.get('CLOUDSTACK', 'log_file')))
        else:
            self.handleError('We are missing settings fields for %s' % (vms[vm_id]['hyperv_vm_name']))
        if imported:
            ### Update the running-hyperv.conf file
            self.confMgr.refresh()
            vms[vm_id]['migrationState'] = 'imported'
            self.updateVms(vms)
            self.confMgr.updateRunningConfig()
# launch an imported VM in CloudStack
def launch_vm(self, vm_id):
    """Deploy the imported VM in CloudStack ('launched'/'migrated' stage).

    Polls until the registered template (and any uploaded volumes) are
    ready, deploys the instance, attaches data volumes (with a reboot to
    surface them), and finally marks the VM 'migrated' and removes it from
    the [STATE] migrate list.

    Fixes vs. the previous revision (behavior-compatible):
    - error message used "%d" with the sha1-string vm_id -> TypeError;
    - vms[vm_id]['cs_ip_address'] was read unconditionally -> KeyError when
      no IP was requested (import_vm never defaults it); now .get() is used;
    - ",".join(cs_network) on a plain network-id string inserted a comma
      between every character; the join is now applied only to sequences.
    """
    self.log.info('\n\n----------------------------\n-- LAUNCHING IMPORTED VMS --\n----------------------------')
    # go through the imported VMs and start them and attach their volumes if they have any
    self.confMgr.refresh()
    if not self.confMgr.getboolean('STATE', 'migrate_error'):
        vms = json.loads(self.confMgr.get('STATE', 'vms'))
        if vms[vm_id]['migrationState'] != 'imported':
            # vm_id is a sha1 hexdigest string, so "%s" (was "%d", which raised TypeError).
            self.handleError("Error: VM %s cannot be launched as it is not yet imported. Skipping the launch process..." % vm_id)
            return
        self.log.info('LAUNCHING %s' % (vms[vm_id]['clean_name']))
        poll = 1
        has_error = False
        self.log.info("migrationState is: %s" % (vms[vm_id]['migrationState']))
        while not has_error and vms[vm_id]['migrationState'] != 'launched':
            isAVm = 'cs_service_offering' in vms[vm_id]
            self.log.info("__________%s is a vm: %s________________________" % (vm_id, isAVm))
            if 'cs_service_offering' in vms[vm_id]:
                self.log.info("__________processing vm: %s________________________" % vm_id)
                # check if the template has finished downloading...
                template = cs.request(dict({
                    'command':'listTemplates',
                    'listall':'true',
                    'templatefilter':'self',
                    'id':vms[vm_id]['cs_template_id']
                }))
                if template and 'template' in template and len(template['template']) > 0:
                    if template['template'][0]['isready']:  # template is ready
                        volumes_ready = True
                        if 'cs_volumes' in vms[vm_id] and len(vms[vm_id]['cs_volumes']) > 0:  # check if volumes are ready
                            for volume_id in vms[vm_id]['cs_volumes']:
                                volume = cs.request(dict({
                                    'command':'listVolumes',
                                    'listall':'true',
                                    'id':volume_id
                                }))
                                if volume and 'volume' in volume and len(volume['volume']) > 0:
                                    # check the state of the volume
                                    if volume['volume'][0]['state'] != 'Uploaded' and volume['volume'][0]['state'] != 'Ready':
                                        self.log.info('%s: %s is waiting for volume \'%s\', current state: %s' %
                                            (poll, vms[vm_id]['hyperv_vm_name'], volume['volume'][0]['name'], volume['volume'][0]['state']))
                                        volumes_ready = False
                                    else:
                                        volumes_ready = volumes_ready and True  # propogates False if any are False
                        # everything should be ready for this VM to be started, go ahead...
                        if volumes_ready:
                            self.log.info('%s: %s is ready to launch...' % (poll, vms[vm_id]['hyperv_vm_name']))
                            self.log.info('Launching VM \'%s\'...' % (vms[vm_id]['hyperv_vm_name'].replace(' ', '-')))
                            # .get(): the key is absent unless an IP was explicitly requested.
                            requestedIpAddress = vms[vm_id].get('cs_ip_address')
                            if (requestedIpAddress and len(requestedIpAddress.strip()) > 0):
                                cmd = dict({
                                    'command':'deployVirtualMachine',
                                    'displayname':vms[vm_id]['hyperv_vm_name'].replace(' ', '-').replace('_', '-'),
                                    'templateid':vms[vm_id]['cs_template_id'],
                                    'serviceofferingid':vms[vm_id]['cs_service_offering'],
                                    'zoneid':vms[vm_id]['cs_zone'],
                                    'domainid':vms[vm_id]['cs_domain'],
                                    'ipaddress':requestedIpAddress,
                                    'account':vms[vm_id]['cs_account']
                                })
                            else:
                                cmd = dict({
                                    'command':'deployVirtualMachine',
                                    'displayname':vms[vm_id]['hyperv_vm_name'].replace(' ', '-').replace('_', '-'),
                                    'templateid':vms[vm_id]['cs_template_id'],
                                    'serviceofferingid':vms[vm_id]['cs_service_offering'],
                                    'zoneid':vms[vm_id]['cs_zone'],
                                    'domainid':vms[vm_id]['cs_domain'],
                                    'account':vms[vm_id]['cs_account']
                                })
                            if 'cs_network' in vms[vm_id] and vms[vm_id]['cs_network'] != '':
                                all_networkIds = vms[vm_id]['cs_network']
                                # cs_network is a single id string when it comes from the
                                # [CLOUDSTACK] default; joining a string would comma-separate
                                # its characters, so only join real sequences.
                                if isinstance(all_networkIds, (list, tuple)):
                                    cmd['networkids'] = ",".join(all_networkIds)
                                else:
                                    cmd['networkids'] = all_networkIds
                                self.log.info("_____networks: %s_________" % cmd['networkids'])
                            cs_vm = cs.request(cmd)  # launch the VM
                            if cs_vm and 'jobresult' in cs_vm and 'virtualmachine' in cs_vm['jobresult']:
                                # attach the data volumes to it if there are data volumes
                                if 'cs_volumes' in vms[vm_id] and len(vms[vm_id]['cs_volumes']) > 0:
                                    for volume_id in vms[vm_id]['cs_volumes']:
                                        self.log.info('Attaching vol:%s to vm:%s ...' % (volume_id, cs_vm['jobresult']['virtualmachine']['id']))
                                        attach = cs.request(dict({
                                            'id':volume_id,
                                            'command':'attachVolume',
                                            'virtualmachineid':cs_vm['jobresult']['virtualmachine']['id']
                                        }))
                                        if attach and 'jobstatus' in attach and attach['jobstatus']:
                                            self.log.info('Successfully attached volume %s' % (volume_id))
                                        else:
                                            self.handleError('Failed to attach volume %s' % (volume_id))
                                            has_error = True
                                            self.confMgr.refresh()
                                            self.confMgr.updateOptions([('STATE', 'migrate_error', 'True')])
                                            self.updateVms(vms)
                                            self.confMgr.updateRunningConfig()
                                if not has_error:
                                    self.log.info('Rebooting the VM to make the attached volumes visible...')
                                    reboot = cs.request(dict({
                                        'command':'rebootVirtualMachine',
                                        'id':cs_vm['jobresult']['virtualmachine']['id']}))
                                    if reboot and 'jobstatus' in reboot and reboot['jobstatus']:
                                        self.log.info('VM rebooted')
                                    else:
                                        self.handleError('VM did not reboot. Check the VM to make sure it came up correctly.')
                                if not has_error:
                                    ### Update the running-hyperv.conf file
                                    self.confMgr.refresh()  # make sure we have everything from this file already
                                    vms[vm_id]['cs_vm_id'] = cs_vm['jobresult']['virtualmachine']['id']
                                    vms[vm_id]['migrationState'] = 'launched'
                                    if (requestedIpAddress):
                                        # Verify CloudStack honored the requested IP.
                                        launchedIpAddress = cs_vm['jobresult']['virtualmachine']['nic'][0]['ipaddress']
                                        print("IP address %s:%s ==> %s:%s. " % (vm_id, requestedIpAddress, vms[vm_id]['cs_vm_id'], launchedIpAddress))
                                        self.log.info("IP address %s:%s ==> %s:%s. " % (vm_id, requestedIpAddress, vms[vm_id]['cs_vm_id'], launchedIpAddress))
                                        if (launchedIpAddress != requestedIpAddress):
                                            self.handleError("VM %s is launched with IP address: %s (not with %s)" % (vms[vm_id]['cs_vm_id'], launchedIpAddress, requestedIpAddress))
                                    self.updateVms(vms)
                                    self.confMgr.updateRunningConfig()
                            elif cs_vm and 'jobresult' in cs_vm and 'errortext' in cs_vm['jobresult']:
                                self.handleError('%s failed to start! ERROR: %s' % (vms[vm_id]['hyperv_vm_name'], cs_vm['jobresult']['errortext']))
                                has_error = True
                            else:
                                self.handleError('%s did not Start or Error correctly...' % (vms[vm_id]['hyperv_vm_name']))
                                has_error = True
                    else:
                        if ('status' in template['template'][0]):
                            self.log.info('%s: %s is waiting for template, current state: %s'% (poll, vms[vm_id]['clean_name'], template['template'][0]['status']))
                        else:
                            has_error = True
                            self.handleError('%s: %s is waiting for template, current state not known.'% (poll, vms[vm_id]['clean_name']))
                            self.handleError(template['template'][0])
            if vms[vm_id]['migrationState'] != 'launched':
                self.log.info('... polling ...')
                poll = poll + 1
                time.sleep(10)
        if not has_error:  # complete the migration...
            self.confMgr.refresh()
            vms = json.loads(self.confMgr.get('STATE', 'vms'))
            # save the updated state
            vms[vm_id]['migrationState'] = 'migrated'
            self.updateVms(vms)
            migrate = json.loads(self.confMgr.get('STATE', 'migrate'))
            migrate.remove(vm_id)
            self.confMgr.updateOptions([('STATE', 'migrate', migrate)])
            self.confMgr.updateRunningConfig()
            self.log.info('SUCCESSFULLY MIGRATED %s to %s\n\n' % (vms[vm_id]['src_name'], vms[vm_id]['clean_name']))
        else:
            self.log.info('An error has occured. Skipping the launch process...')
    def handleError(self, e):
        """Report an error by delegating to the shared common service.

        :param e: the exception instance or error message/object to report.
        """
        self.commonService.handleError(e)
# run the actual migration
    def do_migration(self):
        """Run the end-to-end migration for every VM queued in STATE/migrate.

        For each VM id, the persisted 'migrationState' value determines which
        of the export -> import -> launch phases still need to run, so an
        interrupted migration resumes where it left off.  A VM already in the
        'launched' state is finalized directly: marked 'migrated', removed
        from the queue, and the running config is persisted.  Setup/teardown
        hooks from commonService bracket the whole run.
        """
        try:
            self.commonService.beforeMigrationSetup()
            self.confMgr.refresh()
            vms = json.loads(self.confMgr.get('STATE', 'vms'))
            migrate = json.loads(self.confMgr.get('STATE', 'migrate'))
            for vm_id in migrate[:]: # makes a copy of the list so we can delete from the original
                self.log.info("starting migration for %s. migrationState: %s " % (vm_id, vms[vm_id]['migrationState']))
                # Abort the remaining queue once any iteration has flagged an
                # error in the persisted config.
                if self.confMgr.getboolean('STATE', 'migrate_error'):
                    break
                migrationState = vms[vm_id]['migrationState']
                # Resume from whichever phase this VM last completed.
                if migrationState == '' or migrationState == 'migrated':
                    self.export_vm(vm_id)
                    self.import_vm(vm_id)
                    self.launch_vm(vm_id)
                elif migrationState == 'exported':
                    self.import_vm(vm_id)
                    self.launch_vm(vm_id)
                elif migrationState == 'imported':
                    self.launch_vm(vm_id)
                elif migrationState == 'launched':
                    # Launch already happened: persist the final state and
                    # drop the VM from the migration queue.
                    self.confMgr.refresh()
                    vms = json.loads(self.confMgr.get('STATE', 'vms'))
                    vms[vm_id]['migrationState'] = 'migrated'
                    migrate.remove(vm_id)
                    self.confMgr.updateOptions([('STATE', 'vms', vms), ('STATE', 'migrate', migrate)], True)
                    self.confMgr.updateRunningConfig()
        except Exception as e:
            self.handleError(e)
            traceback.print_exc()
            self.log.exception("Migration stopped with the following stacktrace:")
        finally:
            self.commonService.afterMigrationTeardown()
if __name__ == "__main__":
    # Script entry point: build a migrator without an external config and run
    # the full migration pass.
    migrator = HypverMigrator(None)
    migrator.do_migration()
| {'content_hash': 'cd8143331f65faea8a346c281d220637', 'timestamp': '', 'source': 'github', 'line_count': 600, 'max_line_length': 164, 'avg_line_length': 46.22833333333333, 'alnum_prop': 0.5973969787648268, 'repo_name': 'chipchilders/migrate2cs', 'id': 'be4b11c87c046d5e859e65080efd9ea26f51b131', 'size': '27760', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'migrate_hyperv.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ApacheConf', 'bytes': '4114'}, {'name': 'CSS', 'bytes': '5043'}, {'name': 'HTML', 'bytes': '1583013'}, {'name': 'JavaScript', 'bytes': '23926'}, {'name': 'PHP', 'bytes': '26453'}, {'name': 'Python', 'bytes': '96408'}]} |
package com.rhizospherejs.gwt.client;
import com.google.gwt.ajaxloader.client.ExceptionHelper;
import com.google.gwt.core.client.GWT;
import com.rhizospherejs.gwt.client.resources.ResourcesInjector;
import java.util.Collection;
import java.util.LinkedList;
/**
 * Loader to inject Rhizosphere javascript libraries into a GWT host page.
 * Exposes a callback mechanism to guarantee code execution after successful
 * library injection.
 *
 * @author [email protected] (Riccardo Govoni)
 */
public final class RhizosphereLoader {

  /**
   * Singleton instance.
   */
  private static RhizosphereLoader loader = new RhizosphereLoader();

  /**
   * @return The singleton loader instance.
   */
  public static RhizosphereLoader getInstance() {
    return loader;
  }

  /**
   * Whether JS library injection is currently ongoing.
   */
  private boolean injecting = false;

  /**
   * Whether JS library injection has completed and Rhizosphere libraries
   * are successfully loaded in a GWT host page.
   */
  private boolean loaded = false;

  /**
   * Whether to use the <a href="http://code.google.com/apis/libraries/">
   * Google CDN</a> as possible to load Rhizosphere libraries and its
   * dependencies.
   */
  private boolean useGoogleCDN = false;

  /**
   * List of callbacks to execute once Rhizosphere libraries have been loaded.
   * Swapped for a fresh list before firing, so reentrant registrations are
   * never lost (see {@link #rhizosphereReady()}).
   */
  private Collection<Runnable> callbacks;

  /**
   * Creates the singleton loader instance.
   */
  private RhizosphereLoader() {
    callbacks = new LinkedList<Runnable>();
    injecting = false;
  }

  /**
   * Uses Google CDN to load Rhizosphere libraries and its dependencies as much
   * as possible.
   * @param useGoogleCDN Whether the Google CDN should be used as possible to
   *     load Rhizosphere libraries and its dependencies.
   */
  public void setUseGoogleCDN(final boolean useGoogleCDN) {
    this.useGoogleCDN = useGoogleCDN;
  }

  /**
   * Registers a callback to be invoked after Rhizosphere libraries have been
   * successfully injected into a GWT host page. If Rhizosphere libraries
   * have already been injected, the callback immediately executes.
   * @param callback The callback to invoke.
   */
  public void ensureInjected(final Runnable callback) {
    if (loaded) {
      GWT.log("Rhizosphere has already been loaded. Firing callback now.");
      ExceptionHelper.runProtected(callback);
      return;
    }
    callbacks.add(callback);
    if (injecting) {
      GWT.log("Rhizosphere is currently loading...");
      return;
    }
    GWT.log("Starting Rhizosphere load sequence.");
    injecting = true;
    final ResourcesInjector resourcesFactory = GWT.create(ResourcesInjector.class);
    resourcesFactory.injectDependenciesCss();
    resourcesFactory.injectRhizoCss();
    Runnable dependenciesLoadedCallback = new DependenciesLoadedCallback(resourcesFactory);
    resourcesFactory.injectDependenciesJavascript(dependenciesLoadedCallback, useGoogleCDN);
  }

  /**
   * Invoked (via generated/JSNI code) once all libraries finished loading.
   * Marks the loader as loaded <em>before</em> firing the pending callbacks
   * and fires them from a detached list: a callback that reentrantly calls
   * {@link #ensureInjected(Runnable)} therefore executes immediately instead
   * of being added to a list that is concurrently iterated (which would throw
   * ConcurrentModificationException) and then cleared without ever running.
   */
  @SuppressWarnings("unused")
  private void rhizosphereReady() {
    injecting = false;
    loaded = true;
    // Detach the pending callbacks so the field can safely accept (and is not
    // mutated by) reentrant registrations while we iterate.
    Collection<Runnable> pending = callbacks;
    callbacks = new LinkedList<Runnable>();
    runAllCallbacks(pending);
  }

  /**
   * Runs every callback in the given list, shielding the loader from
   * exceptions thrown by individual callbacks.
   * @param toRun The callbacks to execute.
   */
  private void runAllCallbacks(Collection<Runnable> toRun) {
    GWT.log("Rhizosphere loading complete. Firing " + toRun.size() + " callbacks.");
    for (Runnable r : toRun) {
      ExceptionHelper.runProtected(r);
    }
  }

  /**
   * Callback invoked after Rhizosphere dependencies have completed loading.
   */
  private class DependenciesLoadedCallback implements Runnable {
    private ResourcesInjector resourcesFactory;

    public DependenciesLoadedCallback(ResourcesInjector resourcesFactory) {
      this.resourcesFactory = resourcesFactory;
    }

    @Override
    public void run() {
      resourcesFactory.injectRhizoJavascript(new LoadCompleteCallback());
    }
  }

  /**
   * Callback invoked after all Rhizosphere libraries and its dependencies have
   * completed loading.
   */
  private class LoadCompleteCallback implements Runnable {
    @Override
    public void run() {
      RhizosphereLoader.this.rhizosphereReady();
    }
  }
}
| {'content_hash': 'cfd84523b637454229c22b9072385f8c', 'timestamp': '', 'source': 'github', 'line_count': 148, 'max_line_length': 92, 'avg_line_length': 27.4527027027027, 'alnum_prop': 0.7164656657642137, 'repo_name': 'battlehorse/rhizosphere', 'id': '1091cf9f4d0ca337d26a27394666ad5a34f697ec', 'size': '4674', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'gwt/src/com/rhizospherejs/gwt/client/RhizosphereLoader.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '342958'}, {'name': 'JavaScript', 'bytes': '1323000'}, {'name': 'Python', 'bytes': '59124'}]} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_252) on Fri May 15 17:47:36 UTC 2020 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class org.fax4j.spi.vbs.VBSFaxClientSpi.FaxClientSpiConfigurationConstants (fax4j 0.45.0 API)</title>
<meta name="date" content="2020-05-15">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.fax4j.spi.vbs.VBSFaxClientSpi.FaxClientSpiConfigurationConstants (fax4j 0.45.0 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" title="enum in org.fax4j.spi.vbs">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/fax4j/spi/vbs/class-use/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" target="_top">Frames</a></li>
<li><a href="VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.fax4j.spi.vbs.VBSFaxClientSpi.FaxClientSpiConfigurationConstants" class="title">Uses of Class<br>org.fax4j.spi.vbs.VBSFaxClientSpi.FaxClientSpiConfigurationConstants</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" title="enum in org.fax4j.spi.vbs">VBSFaxClientSpi.FaxClientSpiConfigurationConstants</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.fax4j.spi.vbs">org.fax4j.spi.vbs</a></td>
<td class="colLast">
<div class="block">Provides the VB script based fax client SPI implementation.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.fax4j.spi.vbs">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" title="enum in org.fax4j.spi.vbs">VBSFaxClientSpi.FaxClientSpiConfigurationConstants</a> in <a href="../../../../../org/fax4j/spi/vbs/package-summary.html">org.fax4j.spi.vbs</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../org/fax4j/spi/vbs/package-summary.html">org.fax4j.spi.vbs</a> that return <a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" title="enum in org.fax4j.spi.vbs">VBSFaxClientSpi.FaxClientSpiConfigurationConstants</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" title="enum in org.fax4j.spi.vbs">VBSFaxClientSpi.FaxClientSpiConfigurationConstants</a></code></td>
<td class="colLast"><span class="typeNameLabel">VBSFaxClientSpi.FaxClientSpiConfigurationConstants.</span><code><span class="memberNameLink"><a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html#valueOf-java.lang.String-">valueOf</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name)</code>
<div class="block">Returns the enum constant of this type with the specified name.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" title="enum in org.fax4j.spi.vbs">VBSFaxClientSpi.FaxClientSpiConfigurationConstants</a>[]</code></td>
<td class="colLast"><span class="typeNameLabel">VBSFaxClientSpi.FaxClientSpiConfigurationConstants.</span><code><span class="memberNameLink"><a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html#values--">values</a></span>()</code>
<div class="block">Returns an array containing the constants of this enum type, in
the order they are declared.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/fax4j/spi/vbs/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" title="enum in org.fax4j.spi.vbs">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/fax4j/spi/vbs/class-use/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" target="_top">Frames</a></li>
<li><a href="VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2009–2020 <a href="https://github.com/sagiegurari/fax4j">fax4j</a>. All rights reserved.</small></p>
</body>
</html>
| {'content_hash': '35d0510ccf902b93d2232439a37d3240', 'timestamp': '', 'source': 'github', 'line_count': 177, 'max_line_length': 444, 'avg_line_length': 46.632768361581924, 'alnum_prop': 0.6699781923915678, 'repo_name': 'sagiegurari/fax4j', 'id': '091967337f205c7d583c78fb7f0453546f83600e', 'size': '8254', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'docs/apidocs/org/fax4j/spi/vbs/class-use/VBSFaxClientSpi.FaxClientSpiConfigurationConstants.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '340'}, {'name': 'C', 'bytes': '5384'}, {'name': 'C++', 'bytes': '28759'}, {'name': 'CMake', 'bytes': '42888'}, {'name': 'HTML', 'bytes': '245'}, {'name': 'Java', 'bytes': '1258648'}, {'name': 'Shell', 'bytes': '24'}, {'name': 'VBScript', 'bytes': '5882'}]} |
package org.spockframework.runtime.model;
import org.spockframework.runtime.GroovyRuntimeUtil;
import spock.lang.Shared;
import java.lang.reflect.*;
/**
 * Runtime information about a field in a Spock specification.
 *
 * @author Peter Niederwieser
 */
public class FieldInfo extends NodeInfo<SpecInfo, Field> {
  private int ordinal;
  private boolean hasInitializerFlag;

  public int getOrdinal() {
    return ordinal;
  }

  public void setOrdinal(int ordinal) {
    this.ordinal = ordinal;
  }

  /**
   * Declared type of the underlying reflected field.
   */
  public Class<?> getType() {
    return getReflection().getType();
  }

  /**
   * Whether the underlying field is declared {@code static}.
   */
  public boolean isStatic() {
    return Modifier.isStatic(getReflection().getModifiers());
  }

  /**
   * Whether the field carries the {@code @Shared} annotation.
   */
  public boolean isShared() {
    return isAnnotationPresent(Shared.class);
  }

  public boolean hasInitializer() {
    return hasInitializerFlag;
  }

  public void setHasInitializer(boolean initializer) {
    this.hasInitializerFlag = initializer;
  }

  /**
   * Reads the field's current value from {@code target} using Groovy
   * property access.
   */
  public Object readValue(Object target) {
    return GroovyRuntimeUtil.getProperty(target, getReflection().getName());
  }

  /**
   * Writes {@code value} into the field on {@code target} using Groovy
   * property access.
   */
  public void writeValue(Object target, Object value) {
    GroovyRuntimeUtil.setProperty(target, getReflection().getName(), value);
  }
}
| {'content_hash': 'f8fa7a3fa8f333f7043e7b7e4517f168', 'timestamp': '', 'source': 'github', 'line_count': 54, 'max_line_length': 76, 'avg_line_length': 21.74074074074074, 'alnum_prop': 0.721465076660988, 'repo_name': 'spockframework/spock', 'id': 'a1a2f01a358bade4a91dae83b411155e7bc0a919', 'size': '1789', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'spock-core/src/main/java/org/spockframework/runtime/model/FieldInfo.java', 'mode': '33261', 'license': 'apache-2.0', 'language': [{'name': 'Groovy', 'bytes': '1124541'}, {'name': 'Java', 'bytes': '1514097'}]} |
package org.ovirt.engine.core.branding;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for the {@code BrandingTheme} class.
 */
public class BrandingThemeTest {

    /**
     * The testTheme object.
     */
    BrandingTheme testTheme;

    /**
     * Resolves the branding test-resource root directory from the classpath.
     */
    private File getThemeRootPath() throws URISyntaxException {
        return new File(this.getClass().getClassLoader().
                getResource("./org/ovirt/engine/core/branding").toURI().getPath()); //$NON-NLS-1$
    }

    /**
     * Builds a {@link BrandingTheme} rooted at the test-resource directory
     * for the given theme directory name. Shared by all tests to avoid
     * duplicating the root/path/constructor boilerplate.
     * @param themeDirName the theme directory, e.g. {@code "01-test.brand"}.
     */
    private BrandingTheme buildTheme(String themeDirName) throws URISyntaxException {
        File themeRootPath = getThemeRootPath();
        File themePath = new File(themeRootPath.getAbsoluteFile(), themeDirName);
        return new BrandingTheme(themePath.getAbsolutePath(), themeRootPath, 1);
    }

    @Before
    public void setUp() throws Exception {
        testTheme = buildTheme("01-test.brand"); //$NON-NLS-1$
        assertTrue("The theme should load", testTheme.load()); //$NON-NLS-1$
    }

    @Test
    public void testGetPath() {
        assertEquals("Path should be '/01-test.brand'", "/01-test.brand", //$NON-NLS-1$ //$NON-NLS-2$
                testTheme.getPath());
    }

    @Test
    public void testGetThemeStyleSheet() {
        assertEquals("User portal style sheet: 'user_portal.css'", "user_portal.css", //$NON-NLS-1$ //$NON-NLS-2$
                testTheme.getThemeStyleSheet("userportal"));
        assertEquals("Wedadmin style sheet: 'web_admin.css'", "web_admin.css", //$NON-NLS-1$ //$NON-NLS-2$
                testTheme.getThemeStyleSheet("webadmin"));
    }

    @Test
    public void testGetMessagesBundle() {
        List<ResourceBundle> bundle = testTheme.getMessagesBundle();
        assertNotNull("There should be a bundle", bundle); //$NON-NLS-1$
        assertEquals("Login header", bundle.get(0).getString("obrand.common.login_header_label")); //$NON-NLS-1$ //$NON-NLS-2$
    }

    @Test
    public void testGetMessagesBundleLocale() {
        List<ResourceBundle> bundle = testTheme.getMessagesBundle(Locale.FRENCH);
        assertNotNull("There should be a bundle", bundle); //$NON-NLS-1$
        assertEquals("Login header(fr)", bundle.get(0).getString("obrand.common.login_header_label")); //$NON-NLS-1$ //$NON-NLS-2$
    }

    @Test
    public void testGetCascadingResource() {
        assertTrue("getCascadingResource not reading file from resources.properties", //$NON-NLS-1$
                testTheme.getCascadingResource("favicon").getFile().getAbsolutePath().contains("/01-test.brand/images/favicon.ico")); //$NON-NLS-1$ //$NON-NLS-2$
        assertTrue("getCascadingResource not reading contentType from resources.properties", //$NON-NLS-1$
                testTheme.getCascadingResource("favicon").getContentType().equals("someMadeUp/contentType")); //$NON-NLS-1$ //$NON-NLS-2$
    }

    @Test
    public void testGetCascadingResource_missingKey() {
        assertNull("getCascadingResource not using resources.properties properly", //$NON-NLS-1$
                testTheme.getCascadingResource("this_is_not_a_valid_key")); //$NON-NLS-1$
    }

    @Test
    public void testGetCascadingResource_missingResourcesFile() throws URISyntaxException {
        // theme 4 is purposely missing a resources.properties file
        BrandingTheme theme4 = buildTheme("04-test4.brand"); //$NON-NLS-1$
        assertTrue("Theme 4 should load", theme4.load()); //$NON-NLS-1$
        assertNull("getCascadingResource not handling missing resources.properties gracefully", //$NON-NLS-1$
                theme4.getCascadingResource("this_file_is_missing_anyway")); //$NON-NLS-1$
    }

    @Test
    public void testGetCascadingResource_missingResourcesProperty() throws URISyntaxException {
        // theme 5 is purposely missing a resources key in branding.properties
        BrandingTheme theme5 = buildTheme("05-test5.brand"); //$NON-NLS-1$
        assertTrue("Theme 5 should load", theme5.load()); //$NON-NLS-1$
        assertNull("getCascadingResource not handling missing resources key gracefully", //$NON-NLS-1$
                theme5.getCascadingResource("this_file_is_missing_anyway")); //$NON-NLS-1$
    }

    @Test
    public void testInvalidTemplateReplaceProperty() throws URISyntaxException {
        // theme 6 purposely has an invalid welcome_replace value.
        BrandingTheme theme6 = buildTheme("06-test6.brand"); //$NON-NLS-1$
        assertFalse("Theme 6 should not load", theme6.load()); //$NON-NLS-1$
    }

    @Test
    public void testTemplateReplaceProperty() throws URISyntaxException {
        BrandingTheme theme1 = buildTheme("01-test.brand"); //$NON-NLS-1$
        assertTrue("Theme 1 should load", theme1.load()); //$NON-NLS-1$
        assertFalse("should replace template should be false", //$NON-NLS-1$
                theme1.shouldReplaceWelcomePageSectionTemplate());
    }
}
| {'content_hash': '52cc5259ffba4ac3185f8be9993ec88f', 'timestamp': '', 'source': 'github', 'line_count': 135, 'max_line_length': 161, 'avg_line_length': 47.696296296296296, 'alnum_prop': 0.6629911476937412, 'repo_name': 'halober/ovirt-engine', 'id': 'a5541264e5455e004485aa6729b8356b1c6aa5d8', 'size': '6439', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'backend/manager/modules/branding/src/test/java/org/ovirt/engine/core/branding/BrandingThemeTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '251848'}, {'name': 'Java', 'bytes': '26541598'}, {'name': 'JavaScript', 'bytes': '890'}, {'name': 'Python', 'bytes': '698283'}, {'name': 'Shell', 'bytes': '105362'}, {'name': 'XSLT', 'bytes': '54683'}]} |
<div class="panel panel-default" id="prevTableId">
<div class="panel-heading">
<div class="panel-title">
<h4>Calculate prevalence of attribute</h4>
</div>
</div>
<div class="panel-body">
<table class="table table-striped table-bordered table-hover table-responsive">
<tr>
<th>Select dataset by id:</th>
<td>
<select (change)='loadFilter($event.target.value)'>
<option [value]="-1">Select dataset</option>
<option *ngFor="let dataset of activeDatasets" [style.background-color]="dataset.getColor()"
[value]="dataset.getDatasetId()">
{{dataset.getDatasetId()}}</option>
</select>
</td>
</tr>
<tr>
<th>Set prevalence group by filter:</th>
<td>
<div class="container-fluid" #prevalenceFilterContainer></div>
</td>
</tr>
<tr>
<th>Prevalence:</th>
<td>{{prevalence}}</td>
</tr>
</table>
</div>
</div> | {'content_hash': '979992c6537d1c64d8f321f3045c6c25', 'timestamp': '', 'source': 'github', 'line_count': 32, 'max_line_length': 116, 'avg_line_length': 37.9375, 'alnum_prop': 0.45634266886326197, 'repo_name': 'magneoe/FacilityDiseaseBurden', 'id': '25e7d27ae0e63d5f0265c2ebea4e8632684ef24b', 'size': '1214', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/app/views/temporal/prevalenceTable.component.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '19393'}, {'name': 'HTML', 'bytes': '12961'}, {'name': 'JavaScript', 'bytes': '465288'}, {'name': 'TypeScript', 'bytes': '109250'}]} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_45) on Mon Dec 23 09:17:12 CST 2013 -->
<title>Class Hierarchy</title>
<meta name="date" content="2013-12-23">
<link rel="stylesheet" type="text/css" href="stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Class Hierarchy";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="deprecated-list.html">Deprecated</a></li>
<li><a href="index-all.html">Index</a></li>
<li><a href="help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="index.html?overview-tree.html" target="_top">Frames</a></li>
<li><a href="overview-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 class="title">Hierarchy For All Packages</h1>
</div>
<div class="contentContainer">
<h2 title="Class Hierarchy">Class Hierarchy</h2>
<ul>
<li type="circle">java.lang.Object
<ul>
<li type="circle"><a href="ArrayWrapper_Test.html" title="class in <Unnamed>"><span class="strong">ArrayWrapper_Test</span></a></li>
<li type="circle">java.lang.ClassLoader
<ul>
<li type="circle">java.security.SecureClassLoader
<ul>
<li type="circle">java.net.URLClassLoader (implements java.io.Closeable)
<ul>
<li type="circle"><a href="RJavaClassLoader.html" title="class in <Unnamed>"><span class="strong">RJavaClassLoader</span></a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li type="circle"><a href="DummyPoint.html" title="class in <Unnamed>"><span class="strong">DummyPoint</span></a> (implements java.lang.Cloneable)</li>
<li type="circle"><a href="RectangularArrayBuilder_Test.html" title="class in <Unnamed>"><span class="strong">RectangularArrayBuilder_Test</span></a></li>
<li type="circle"><a href="RectangularArrayExamples.html" title="class in <Unnamed>"><span class="strong">RectangularArrayExamples</span></a></li>
<li type="circle"><a href="RJavaArrayIterator.html" title="class in <Unnamed>"><span class="strong">RJavaArrayIterator</span></a>
<ul>
<li type="circle"><a href="ArrayWrapper.html" title="class in <Unnamed>"><span class="strong">ArrayWrapper</span></a></li>
<li type="circle"><a href="RectangularArrayBuilder.html" title="class in <Unnamed>"><span class="strong">RectangularArrayBuilder</span></a></li>
<li type="circle"><a href="RectangularArraySummary.html" title="class in <Unnamed>"><span class="strong">RectangularArraySummary</span></a></li>
</ul>
</li>
<li type="circle"><a href="RJavaArrayTools.html" title="class in <Unnamed>"><span class="strong">RJavaArrayTools</span></a></li>
<li type="circle"><a href="RJavaArrayTools_Test.html" title="class in <Unnamed>"><span class="strong">RJavaArrayTools_Test</span></a></li>
<li type="circle"><a href="RJavaComparator.html" title="class in <Unnamed>"><span class="strong">RJavaComparator</span></a></li>
<li type="circle"><a href="RJavaImport.html" title="class in <Unnamed>"><span class="strong">RJavaImport</span></a> (implements java.io.Serializable)</li>
<li type="circle"><a href="RJavaTools.html" title="class in <Unnamed>"><span class="strong">RJavaTools</span></a></li>
<li type="circle"><a href="RJavaTools_Test.html" title="class in <Unnamed>"><span class="strong">RJavaTools_Test</span></a></li>
<li type="circle"><a href="RJavaTools_Test.DummyNonStaticClass.html" title="class in <Unnamed>"><span class="strong">RJavaTools_Test.DummyNonStaticClass</span></a></li>
<li type="circle">java.lang.Throwable (implements java.io.Serializable)
<ul>
<li type="circle">java.lang.Exception
<ul>
<li type="circle"><a href="ArrayDimensionException.html" title="class in <Unnamed>"><span class="strong">ArrayDimensionException</span></a></li>
<li type="circle"><a href="FlatException.html" title="class in <Unnamed>"><span class="strong">FlatException</span></a></li>
<li type="circle"><a href="NotAnArrayException.html" title="class in <Unnamed>"><span class="strong">NotAnArrayException</span></a></li>
<li type="circle"><a href="NotComparableException.html" title="class in <Unnamed>"><span class="strong">NotComparableException</span></a></li>
<li type="circle"><a href="ObjectArrayException.html" title="class in <Unnamed>"><span class="strong">ObjectArrayException</span></a></li>
<li type="circle"><a href="PrimitiveArrayException.html" title="class in <Unnamed>"><span class="strong">PrimitiveArrayException</span></a></li>
<li type="circle"><a href="RJavaArrayTools.ArrayDimensionMismatchException.html" title="class in <Unnamed>"><span class="strong">RJavaArrayTools.ArrayDimensionMismatchException</span></a></li>
<li type="circle"><a href="RJavaTools_Test.TestException.html" title="class in <Unnamed>"><span class="strong">RJavaTools_Test.TestException</span></a></li>
<li type="circle"><a href="TestException.html" title="class in <Unnamed>"><span class="strong">TestException</span></a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="deprecated-list.html">Deprecated</a></li>
<li><a href="index-all.html">Index</a></li>
<li><a href="help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="index.html?overview-tree.html" target="_top">Frames</a></li>
<li><a href="overview-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {'content_hash': '0b1ee6d6daedc0f7f22de9fa7379df8b', 'timestamp': '', 'source': 'github', 'line_count': 167, 'max_line_length': 198, 'avg_line_length': 44.31137724550898, 'alnum_prop': 0.6790540540540541, 'repo_name': 'lehoangha/GSOE9712_S115_RA', 'id': 'd598604bb4a5629f1a67064db4217feca4b4bc07', 'size': '7400', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'packrat/lib/x86_64-pc-linux-gnu/3.2.1/rJava/javadoc/overview-tree.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '12217'}, {'name': 'C++', 'bytes': '5632835'}, {'name': 'CSS', 'bytes': '43527'}, {'name': 'HTML', 'bytes': '785481'}, {'name': 'Java', 'bytes': '258796'}, {'name': 'Makefile', 'bytes': '1936'}, {'name': 'R', 'bytes': '988606'}, {'name': 'Rebol', 'bytes': '4381'}, {'name': 'Shell', 'bytes': '4747'}, {'name': 'TeX', 'bytes': '19899'}]} |
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.ArrayList;
/**
 * Minimal ANSI-terminal "screen" for the IRC client: keeps a scrollback
 * buffer of at most 20 lines, drains stdin without blocking, and redraws
 * the whole display on flush().
 *
 * NOTE(review): this source carries information-flow annotations — the
 * "{L /parent.clearHistory T}" erasure type on the buffer field and the
 * "({L})" declassification cast in flush(). They are part of this
 * dialect's type system, not ordinary comments, and must not be removed.
 */
public class Screen {
    // ANSI escape sequences: clear the screen, then move the cursor home.
    static final String ANSI_CLS = "\u001b[2J";
    static final String ANSI_HOME = "\u001b[H";
    private final SimpleIRCClient parent;
    // ANNOTATION: field erasure type
    // Scrollback buffer; erased when parent.clearHistory fires (per annotation).
    private ArrayList{L /parent.clearHistory T} buffer;
    private final PrintStream out;       // display output (System.out)
    private final InputStreamReader isr; // raw stdin reader
    private final ArrayList inputBuffer; // completed lines not yet consumed
    private String incomplete;           // partial line currently being typed

    public Screen(SimpleIRCClient parent) {
        this.parent = parent;
        this.buffer = new ArrayList();
        this.inputBuffer = new ArrayList();
        this.out = System.out;
        this.isr = new InputStreamReader(System.in);
        this.incomplete = "";
    }

    /**
     * Returns the next complete input line, or null if none is available.
     * Non-blocking: drains whatever is pending on stdin first.
     */
    public String getInput() {
        this.bufferInput();
        if (!inputBuffer.isEmpty()) {
            String input = (String) inputBuffer.get(0);
            inputBuffer.remove(0);
            return input;
        } else {
            return null;
        }
    }

    /**
     * Drains every character currently available on stdin without blocking.
     * Lines terminated by '\n' are queued in inputBuffer; the trailing
     * partial line is kept in `incomplete` for the next call.
     */
    public void bufferInput() {
        try {
            int c = -1;
            while (isr.ready() && (c = isr.read()) != -1) {
                if (c == '\n') {
                    inputBuffer.add(incomplete);
                    incomplete = "";
                } else {
                    incomplete += (char) c;
                }
            }
        } catch (Throwable e) {} // best-effort: an I/O error just stops the drain
    }

    /**
     * Redraws the display: clears the terminal, prints the scrollback,
     * pads to 20 rows, then re-echoes the prompt and partially-typed line.
     */
    public void flush() {
        this.bufferInput();
        this.clearScreen();
        int drawn = 0;
        try {
            for (int i = 0; i < buffer.size(); i++) {
                // Declassify output to the screen
                // ANNOTATION: declassification
                out.println(({L})buffer.get(i));
                drawn++;
            }
        } catch (Throwable e) {}
        // Pad the remainder of the 20-row display with blank lines.
        // (Note: the loop condition/increment use `drawn`, not `i`.)
        for (int i = drawn; drawn < 20; drawn++) {
            out.println();
        }
        out.print("> " + incomplete);
        out.flush();
    }

    /** Appends a line to the scrollback, evicting the oldest beyond 20 rows. */
    public void writeLine(String line) {
        try {
            buffer.add(line);
            while (buffer.size() > 20) {
                buffer.remove(0);
            }
        } catch (Throwable e) {}
    }

    /** Clears the terminal and homes the cursor via ANSI escapes. */
    public void clearScreen() {
        out.print(ANSI_CLS + ANSI_HOME);
        out.flush();
    }
}
| {'content_hash': 'fc0f08f078736d7234a61bad51b96d17', 'timestamp': '', 'source': 'github', 'line_count': 85, 'max_line_length': 55, 'avg_line_length': 23.388235294117646, 'alnum_prop': 0.5935613682092555, 'repo_name': 'HarvardPL/cryptoerase', 'id': '5bc41d8ae7157533c631e56266ab072abacb8539', 'size': '1988', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'examples/SimpleIRC/Screen.jl', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Java', 'bytes': '187943'}, {'name': 'Julia', 'bytes': '100100'}, {'name': 'Lex', 'bytes': '20909'}, {'name': 'Shell', 'bytes': '6317'}]} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Messaging;
using System.Text;
namespace MsmqInt.Mq
{
    /// <summary>
    /// Callback invoked once per message received asynchronously.
    /// </summary>
    /// <typeparam name="T">Deserialized message body type.</typeparam>
    /// <param name="message">The received message body.</param>
    public delegate void MqReceived<T>(T message);

    /// <summary>
    /// Minimal message-queue abstraction. Implementations own the underlying
    /// queue handle, hence <see cref="IDisposable"/>.
    /// </summary>
    interface IMq : IDisposable
    {
        /// <summary>Sends a message to the queue.</summary>
        void Send<T>(T msg);

        /// <summary>Starts asynchronous receiving; <paramref name="mqReceived"/>
        /// is invoked for each incoming message.</summary>
        void ReceiveAsync<T>(MqReceived<T> mqReceived);

        /// <summary>Blocks until a message arrives or
        /// <paramref name="receiveTimeout"/> elapses.</summary>
        T Receive<T>(TimeSpan receiveTimeout);

        /// <summary>Initializes the underlying queue; call before first use.
        /// NOTE(review): presumably idempotent — confirm against implementations.</summary>
        void Init();
    }
}
| {'content_hash': '1465aa617f95bd4508e6d580116498fd', 'timestamp': '', 'source': 'github', 'line_count': 17, 'max_line_length': 55, 'avg_line_length': 22.647058823529413, 'alnum_prop': 0.6701298701298701, 'repo_name': 'beyazc/MsmqInt', 'id': '88e3ab1b5e39ba6019ac2323ecf7be9acda29bd7', 'size': '387', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'MsmqInt/Mq/IMq.cs', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C#', 'bytes': '14790'}]} |
package com.interviews.algo;
import java.util.Hashtable;
/**
 * Fixed-capacity LRU (least-recently-used) cache mapping int keys to int
 * values.
 *
 * Implementation: a HashMap for O(1) key lookup plus an intrusive doubly
 * linked list (with sentinel head/tail nodes) ordered from most- to
 * least-recently used. Both {@link #get(int)} and {@link #set(int, int)}
 * run in O(1).
 *
 * Changes from the original: the legacy synchronized {@code Hashtable} is
 * replaced by {@code HashMap} (no other state was synchronized, so the
 * class was never safely concurrent anyway), and the local variable that
 * shadowed the {@code tail} field in {@code set} has been renamed.
 * This class is not thread-safe.
 */
public class LRUCache {

    /** Node of the intrusive doubly linked recency list. */
    private static class DLinkedNode {
        int key;
        int value;
        DLinkedNode prev;
        DLinkedNode next;
    }

    private final Map<Integer, DLinkedNode> cache = new HashMap<>();
    private int count;              // current number of entries
    private final int capacity;     // maximum number of entries
    private final DLinkedNode head; // sentinel: head.next is the MRU entry
    private final DLinkedNode tail; // sentinel: tail.prev is the LRU entry

    /**
     * @param capacity maximum number of entries retained; when exceeded,
     *                 the least-recently-used entry is evicted
     */
    public LRUCache(int capacity) {
        this.count = 0;
        this.capacity = capacity;
        head = new DLinkedNode();
        tail = new DLinkedNode();
        head.next = tail;
        tail.prev = head;
    }

    /** Inserts {@code node} right after the head sentinel (MRU position). */
    private void addNode(DLinkedNode node) {
        node.prev = head;
        node.next = head.next;
        head.next.prev = node;
        head.next = node;
    }

    /** Unlinks an existing node from the recency list. */
    private void removeNode(DLinkedNode node) {
        DLinkedNode pre = node.prev;
        DLinkedNode post = node.next;
        pre.next = post;
        post.prev = pre;
    }

    /** Moves an existing node to the MRU position. */
    private void moveToHead(DLinkedNode node) {
        removeNode(node);
        addNode(node);
    }

    /** Unlinks and returns the LRU node (the one just before the tail sentinel). */
    private DLinkedNode popTail() {
        DLinkedNode lru = tail.prev;
        removeNode(lru);
        return lru;
    }

    /**
     * Returns the value for {@code key}, marking it most recently used,
     * or -1 if the key is absent (sentinel value per the original API).
     */
    public int get(int key) {
        DLinkedNode node = cache.get(key);
        if (node == null) {
            return -1;
        }
        moveToHead(node);
        return node.value;
    }

    /**
     * Inserts or updates {@code key}, marking it most recently used.
     * Evicts the least-recently-used entry when capacity is exceeded.
     */
    public void set(int key, int value) {
        DLinkedNode node = cache.get(key);
        if (node == null) {
            DLinkedNode newNode = new DLinkedNode();
            newNode.key = key;
            newNode.value = value;
            cache.put(key, newNode);
            addNode(newNode);
            ++count;
            if (count > capacity) {
                // Evict the LRU entry. (Renamed from 'tail', which shadowed
                // the sentinel field of the same name.)
                DLinkedNode evicted = popTail();
                cache.remove(evicted.key);
                --count;
            }
        } else {
            node.value = value;
            moveToHead(node);
        }
    }
}
| {'content_hash': '2ee2f5c7e6797092ba57064b4749b39c', 'timestamp': '', 'source': 'github', 'line_count': 111, 'max_line_length': 90, 'avg_line_length': 21.90990990990991, 'alnum_prop': 0.5308388157894737, 'repo_name': 'harishraj/epicode', 'id': '9327190e63760010c05ca5429f53d68dab61f1ef', 'size': '2432', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/com/interviews/algo/LRUCache.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Java', 'bytes': '1255065'}]} |
#ifndef __CACHEMANAGER_H__
#define __CACHEMANAGER_H__
#include <ghoul/filesystem/directory.h>
#include <ghoul/filesystem/file.h>

#include <map>
#include <string>
#include <utility>
#include <vector>
namespace ghoul {
namespace filesystem {
/**
* The CacheManager allows users to request a storage location for an, optionally
* persistent, file path to store a cached result. This class only generates and manages
* the file paths and does not do any caching itself. The use case for this is are
* two-fold:<br>
* First, expensive operation that has a result, which gets written to a file and the
* developer wants to retain the results without recomputing it at every application
* start. For this, a cache file is requested with the getCachedFile method and the
* <code>isPersistent</code> flag set to <code>true</code>. Using the same
* <code>file</code> and <code>information</code> values, the same path will be retrieved
* in subsequent application runs.<br>
* The second use-case is a temporary file, also with the getCachedFile method, but the
* <code>isPersistent</code> flag set to <code>false</code>. Non-persistent files will
* automatically be deleted when the program ends.<br>
* The persistent files are stored in a <code>cache</code> file so that they can be
* retained between application runs.
*/
class CacheManager {
public:
    /**
     * The constructor will automatically register all persistent cache entries from
     * previous application runs and clean the directory of non-persistent entries that
     * might have been left intact if the previous run crashed. After the constructor
     * returns, the CacheManager will leave a cleaned cache directory and the persistent
     * files are correctly registered and available.
     * \param directory The directory that is used for the CacheManager
     * \param version The version of the cache. If a major change happens that shouldn't
     * be dealt with on an individual level, this invalidates previous caches
     */
    CacheManager(std::string directory, int version = -1);

    /**
     * The destructor will save all information on persistent files in a
     * <code>cache</code> file in the cache directory that was passed in the constructor
     * so that they can be retrieved when the application is started up again. All
     * non-persistent files are automatically deleted in the destructor.
     */
    ~CacheManager();

    /**
     * Returns the path to a storage location for the cached file. Depending on the
     * persistence (<code>isPersistent</code>), the directory and files will automatically
     * be cleaned on application end or be made available automatically on the next
     * application run. The method will use the date of last modification as a unique
     * identifier for the file. Subsequent calls (in the same run or different) with the
     * same <code>file</code> will consistently produce the same file path until the
     * last-modified date changes. If the cached file was created before, the
     * <code>isPersistent</code> parameter is silently ignored.
     * \param file The file name of the file for which the cached entry is to be retrieved
     * \param cachedFileName The output file name pointing to the cached file that can be
     * used by the caller to store the results
     * \param isPersistent This parameter will only be used if the cached file is used for
     * the first time and determines if the CacheManager should automatically delete the
     * file when the application closes (<code>false</code>) or if the file should be kept
     * and automatically be re-added to the CacheManager on the next application run
     * (<code>true</code>). If the cached file has been created before, this parameter is
     * silently ignored.
     * \return <code>true</code> if a file name was returned successfully,
     * <code>false</code> otherwise
     */
    bool getCachedFile(const File& file, std::string& cachedFileName,
        bool isPersistent = false);

    /**
     * Returns the path to a storage location for the cached file. Depending on the
     * persistence (<code>isPersistent</code>), the directory and files will automatically
     * be cleaned on application end or be made available automatically on the next
     * application run. Subsequent calls (in the same run or different) with the same
     * <code>file</code> and <code>information</code> will consistently produce the same
     * file path. The combination of <code>file</code> and <code>information</code> is the
     * unique key for the returned cached file. If the cached file was created before, the
     * <code>isPersistent</code> parameter is silently ignored.
     * \param file The file name of the file for which the cached entry is to be retrieved
     * \param information Additional information that is used to uniquely identify the
     * cached file. This can be, for example, resolutions or parameters. The combination
     * of the <code>file</code> + <code>information</code> uniquely identifies a cached
     * file
     * \param cachedFileName The output file name pointing to the cached file that can be
     * used by the caller to store the results
     * \param isPersistent This parameter will only be used if the cached file is used for
     * the first time and determines if the CacheManager should automatically delete the
     * file when the application closes (<code>false</code>) or if the file should be kept
     * and automatically be re-added to the CacheManager on the next application run
     * (<code>true</code>). If the cached file has been created before, this parameter is
     * silently ignored.
     * \return <code>true</code> if a file name was returned successfully,
     * <code>false</code> otherwise
     */
    bool getCachedFile(const File& file, const std::string& information,
        std::string& cachedFileName, bool isPersistent = false);

    /**
     * Returns the path to a storage location for the cached file. Depending on the
     * persistence (<code>isPersistent</code>), the directory and files will automatically
     * be cleaned on application end or be made available automatically on the next
     * application run. Subsequent calls (in the same run or different) with the same
     * <code>baseName</code> and <code>information</code> will consistently produce the
     * same file path. The combination of <code>baseName</code> and
     * <code>information</code> is the unique key for the returned cached file. If the
     * cached file was created before, the <code>isPersistent</code> parameter is silently
     * ignored.<br>
     * As the <code>baseName</code> will be used as a name for a directory in the file
     * system, the usual restrictions apply. The baseName is automatically converted into
     * lower case, so that the <code>baseName</code> of <code>base</code>,
     * <code>bAsE</code>, and <code>BASE</code> all refer to the same file. Furthermore,
     * the <code>baseName</code> cannot contain any of the following characters:
     * <code>/</code>, <code>\\</code>, <code>?</code>, <code>%</code>, <code>*</code>,
     * <code>:</code>, <code>|</code>, <code>"</code>, <code>\<</code>, <code>\></code>,
     * or <code>.</code>
     * \param baseName The base name for which the cached entry is to be retrieved. A list
     * of restrictions apply to this base name (see above)
     * \param information Additional information that is used to uniquely identify the
     * cached file. This can be, for example, resolutions or parameters. The combination
     * of the <code>baseName</code> + <code>information</code> uniquely identifies a
     * cached file
     * \param cachedFileName The output file name pointing to the cached file that can be
     * used by the caller to store the results
     * \param isPersistent This parameter will only be used if the cached file is used for
     * the first time and determines if the CacheManager should automatically delete the
     * file when the application closes (<code>false</code>) or if the file should be kept
     * and automatically be re-added to the CacheManager on the next application run
     * (<code>true</code>). If the cached file has been created before, this parameter is
     * silently ignored.
     * \return <code>true</code> if a file name was returned successfully,
     * <code>false</code> otherwise
     */
    bool getCachedFile(const std::string& baseName, const std::string& information,
        std::string& cachedFileName, bool isPersistent = false);

    /**
     * This method checks if a cached <code>file</code> has been registered before in this
     * application run (persistent and non-persistent files) or in a previous run
     * (persistent cache files only). Note that this only checks if a file has been
     * requested before, not if the cached file has actually been used. The method will
     * use the date of last modification as a unique identifier for the file.
     * \param file The file for which the cached file should be searched
     * \return <code>true</code> if a cached file was requested before; <code>false</code>
     * otherwise
     */
    bool hasCachedFile(const File& file) const;

    /**
     * This method checks if a cached <code>file</code> has been registered before in this
     * application run (persistent and non-persistent files) or in a previous run
     * (persistent cache files only) with the provided <code>information</code>. Note that
     * this only checks if a file has been requested before, not if the cached file has
     * actually been used.
     * \param file The file for which the cached file should be searched
     * \param information The identifying information for the file
     * \return <code>true</code> if a cached file was requested before; <code>false</code>
     * otherwise
     */
    bool hasCachedFile(const File& file, const std::string& information) const;

    /**
     * This method checks if a cached file has been registered before in this
     * application run (persistent and non-persistent files) or in a previous run
     * (persistent cache files only) with the provided <code>baseName</code> and
     * <code>information</code>. Note that this only checks if a file has been requested
     * before, not if the cached file has actually been used.
     * \param baseName The base name for which the cache file should be searched
     * \param information The identifying information for the file
     * \return <code>true</code> if a cached file was requested before; <code>false</code>
     * otherwise
     */
    bool hasCachedFile(const std::string& baseName, const std::string& information) const;

    /**
     * Removes the cached file and deletes the entry from the CacheManager. If the
     * <code>file</code> has not previously been used to request a cache entry, no error
     * will be signaled. The method will use the date of last modification as a unique
     * identifier for the file.
     * \param file The file for which the cache file should be deleted
     */
    void removeCacheFile(const File& file);

    /**
     * Removes the cached file and deletes the entry from the CacheManager. If the
     * <code>file</code> has not previously been used to request a cache entry, no error
     * will be signaled.
     * \param file The file for which the cache file should be deleted
     * \param information The detailed information for the cached file which should be
     * deleted
     */
    void removeCacheFile(const File& file, const std::string& information);

    /**
     * Removes the cached file and deletes the entry from the CacheManager. If the
     * <code>baseName</code> has not previously been used to request a cache entry, no
     * error will be signaled. The same restrictions for the <code>baseName</code> as in
     * getCachedFile apply to this function.
     * \param baseName The base name for which the cache file should be deleted
     * \param information The detailed information identifying the cached file that
     * should be deleted
     */
    void removeCacheFile(const std::string& baseName,
        const std::string& information);

protected:
    /// This struct stores the cache information for a specific hash value.
    struct CacheInformation {
        std::string file; ///< The path to the cached file
        bool isPersistent; ///< if the cached entry should be automatically deleted
    };

    /// Pair of (hash, cached file path) as read back from the cache directory.
    typedef std::pair<unsigned int, std::string> LoadedCacheInfo;

    /**
     * Generates a hash number from the file path and information string
     * \return A hash number
     */
    unsigned int generateHash(std::string file, std::string information) const;

    /**
     * Cleans a directory by removing all files that are not flagged as persistent.
     */
    void cleanDirectory(const Directory& dir) const;

    /**
     * Reads information from a directory about its content and transforms it into a
     * vector of content. This vector is compared with the cache description file
     * and used as a foundation for cleaning directories.
     */
    std::vector<LoadedCacheInfo> cacheInformationFromDirectory(
        const Directory& dir) const;

    // Not copyable or movable.
    CacheManager(const CacheManager& c) = delete;
    CacheManager(CacheManager&& m) = delete;
    CacheManager& operator=(const CacheManager& rhs) = delete;

    /// The cache directory
    Directory _directory;
    /// The cache version
    int _version;
    /// A map containing file hashes and file information
    std::map<unsigned int, CacheInformation> _files;
};
} // namespace filesystem
} // namespace ghoul
#endif // __CACHEMANAGER_H__
| {'content_hash': '89e474bcd70a793fd19fbd227436316d', 'timestamp': '', 'source': 'github', 'line_count': 259, 'max_line_length': 90, 'avg_line_length': 52.640926640926644, 'alnum_prop': 0.7101364236467654, 'repo_name': 'emiax/Ghoul', 'id': 'c1f27d6ec126fd262965858f715d4b9c84363ad9', 'size': '15817', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'include/ghoul/filesystem/cachemanager.h', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'C', 'bytes': '1399211'}, {'name': 'C++', 'bytes': '3545571'}, {'name': 'CMake', 'bytes': '43534'}, {'name': 'CSS', 'bytes': '15039'}, {'name': 'HTML', 'bytes': '1846'}]} |
<?php

use yii\helpers\Html;
use yii\widgets\ActiveForm;

/* @var $this yii\web\View */
/* @var $model app\modules\employee\models\EmpDepartmentSearch */
/* @var $form yii\widgets\ActiveForm */
?>

<div class="emp-department-search">

    <?php /* Search form submits via GET to the index action so that the
             filter parameters appear in (and are bookmarkable from) the URL. */ ?>
    <?php $form = ActiveForm::begin([
        'action' => ['index'],
        'method' => 'get',
    ]); ?>

    <?= $form->field($model, 'emp_department_id') ?>

    <?= $form->field($model, 'emp_department_name') ?>

    <?= $form->field($model, 'emp_department_alias') ?>

    <?= $form->field($model, 'created_at') ?>

    <?= $form->field($model, 'created_by') ?>

    <?php // Additional filters left disabled by the Gii generator: ?>
    <?php // echo $form->field($model, 'updated_at') ?>

    <?php // echo $form->field($model, 'updated_by') ?>

    <?php // echo $form->field($model, 'is_status') ?>

    <div class="form-group">
        <?= Html::submitButton('Search', ['class' => 'btn btn-primary']) ?>
        <?= Html::resetButton('Reset', ['class' => 'btn btn-default']) ?>
    </div>

    <?php ActiveForm::end(); ?>

</div>
| {'content_hash': '204441b722c935b03299ee651aad372c', 'timestamp': '', 'source': 'github', 'line_count': 41, 'max_line_length': 75, 'avg_line_length': 24.51219512195122, 'alnum_prop': 0.5482587064676617, 'repo_name': 'rhythmofnature/lorry', 'id': 'a390c17a28e3324b619279ff7136ae1df01bcc1c', 'size': '1005', 'binary': False, 'copies': '9', 'ref': 'refs/heads/master', 'path': 'modules/employee/views/emp-department/_search.php', 'mode': '33261', 'license': 'bsd-3-clause', 'language': [{'name': 'ApacheConf', 'bytes': '46'}, {'name': 'Batchfile', 'bytes': '515'}, {'name': 'CSS', 'bytes': '364044'}, {'name': 'JavaScript', 'bytes': '605962'}, {'name': 'PHP', 'bytes': '1450014'}]} |
/**
* @author Artem Aliev
*/
package java.lang;
import java.lang.ref.WeakReference;
import java.lang.ref.ReferenceQueue;
/**
 * Weak reference to a {@link Thread} that additionally records the address
 * of the thread's native (VM-side) structure. Every instance is registered
 * with a single shared {@link ReferenceQueue}, so the VM can {@link #poll()}
 * for threads that have been garbage collected and release their native
 * counterparts.
 */
class ThreadWeakRef extends WeakReference<Thread> {

    /** Queue onto which collected ThreadWeakRef instances are enqueued. */
    private static ReferenceQueue<Thread> refQueue = new ReferenceQueue<Thread>();

    /** Address of the associated native thread structure; 0 means "unset". */
    private long nativeAddr = 0;

    public ThreadWeakRef(Thread thread) {
        super(thread, ThreadWeakRef.refQueue);
    }

    public void setNativeAddr(long newAddr) {
        nativeAddr = newAddr;
    }

    public long getNativeAddr() {
        return nativeAddr;
    }

    /**
     * Returns the next ThreadWeakRef whose referent has been collected, or
     * null if none is pending. Only ThreadWeakRef instances are ever
     * registered with {@code refQueue}, so the single downcast cannot fail.
     * (The original's intermediate cast to WeakReference&lt;Thread&gt; was
     * redundant and has been removed.)
     */
    public static ThreadWeakRef poll() {
        return (ThreadWeakRef) refQueue.poll();
    }
}
| {'content_hash': 'cff5a5a6ac2af472d581974b7840eb53', 'timestamp': '', 'source': 'github', 'line_count': 30, 'max_line_length': 82, 'avg_line_length': 22.466666666666665, 'alnum_prop': 0.6735905044510386, 'repo_name': 'freeVM/freeVM', 'id': '108133c94619b86ecf5dec8e3b8912d444f780fc', 'size': '1486', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'enhanced/java/drlvm/vm/vmcore/src/kernel_classes/javasrc/java/lang/ThreadWeakRef.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Assembly', 'bytes': '116828'}, {'name': 'C', 'bytes': '17860389'}, {'name': 'C++', 'bytes': '19007206'}, {'name': 'CSS', 'bytes': '217777'}, {'name': 'Java', 'bytes': '152108632'}, {'name': 'Objective-C', 'bytes': '106412'}, {'name': 'Objective-J', 'bytes': '11029421'}, {'name': 'Perl', 'bytes': '305690'}, {'name': 'Scilab', 'bytes': '34'}, {'name': 'Shell', 'bytes': '153821'}, {'name': 'XSLT', 'bytes': '152859'}]} |
// Copyright 2015 XLGAMES Inc.
//
// Distributed under the MIT License (See
// accompanying file "LICENSE" or the website
// http://www.opensource.org/licenses/mit-license.php)
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Windows.Forms;
using Sce.Atf.Adaptation;
using Sce.Atf.Applications;
using Sce.Atf.Dom;
using LevelEditorCore;
using LevelEditorXLE.Extensions;
namespace LevelEditorXLE.Terrain
{
using TerrainST = Schema.terrainType;
static class Helpers
{
    /// <summary>
    /// Clamps <paramref name="val"/> into the inclusive range
    /// [<paramref name="min"/>, <paramref name="max"/>].
    /// </summary>
    public static T Clamp<T>(this T val, T min, T max) where T : IComparable<T>
    {
        // Enforce the lower bound first, then the upper bound;
        // otherwise the value is already within range.
        return val.CompareTo(min) < 0 ? min
             : val.CompareTo(max) > 0 ? max
             : val;
    }
}
class XLETerrainGob : DomNodeAdapter, IListable, ICommandClient, IContextMenuCommandProvider, IExportable, IHierarchical, XLEBridgeUtils.INativeObjectAdapter
{
public XLETerrainGob() {}

/// <summary>Fills in tree-view item info (type icon and fixed label).</summary>
public void GetInfo(ItemInfo info)
{
    info.ImageIndex = Util.GetTypeImageIndex(DomNode.Type, info.GetImageList());
    info.Label = "Terrain";
}

/// <summary>Creates a bare terrain DomNode with default attribute values.</summary>
public static DomNode Create() { return new DomNode(TerrainST.Type); }

/// <summary>
/// Creates a terrain DomNode and invokes DoModalConfigure() on its
/// XLETerrainGob adapter. Returns the node only if configuration
/// completed successfully; otherwise returns null.
/// </summary>
public static DomNode CreateWithConfigure()
{
    var result = new DomNode(TerrainST.Type);
    var adapter = result.As<XLETerrainGob>();
    if (adapter != null && adapter.DoModalConfigure())
        return result;
    return null;
}
// ---- Terrain attributes persisted on the DomNode (see Schema.terrainType) ----

/// <summary>Directory containing the editable uber-surface files.</summary>
public Uri UberSurfaceDirectory
{
    get { return GetAttribute<Uri>(TerrainST.UberSurfaceDirAttribute); }
    set { SetAttribute(TerrainST.UberSurfaceDirAttribute, value); }
}

/// <summary>Directory containing the baked terrain cell files.</summary>
public Uri CellsDirectory
{
    get { return GetAttribute<Uri>(TerrainST.CellsDirAttribute); }
    set { SetAttribute(TerrainST.CellsDirAttribute, value); }
}

/// <summary>Node dimensions; the setter clamps to the supported range (1..1024).</summary>
public uint NodeDimensions
{
    get { return GetAttribute<uint>(TerrainST.NodeDimensionsAttribute); }
    set { SetAttribute(TerrainST.NodeDimensionsAttribute, ClampNodeDimensions(value)); }
}

public uint Overlap
{
    get { return GetAttribute<uint>(TerrainST.OverlapAttribute); }
    set { SetAttribute(TerrainST.OverlapAttribute, value); }
}

public float Spacing
{
    get { return GetAttribute<float>(TerrainST.SpacingAttribute); }
    set { SetAttribute(TerrainST.SpacingAttribute, value); }
}

/// <summary>Cell tree depth; the setter clamps to the supported range (1..16).</summary>
public uint CellTreeDepth
{
    get { return GetAttribute<uint>(TerrainST.CellTreeDepthAttribute); }
    set { SetAttribute(TerrainST.CellTreeDepthAttribute, ClampCellTreeDepth(value)); }
}

public bool HasEncodedGradientFlags
{
    get { return GetAttribute<bool>(TerrainST.HasEncodedGradientFlagsAttribute); }
    set { SetAttribute(TerrainST.HasEncodedGradientFlagsAttribute, value); }
}

/// <summary>Sun path angle in degrees (converted to radians when building
/// the engine config).</summary>
public float SunPathAngle
{
    get { return GetAttribute<float>(TerrainST.SunPathAngleAttribute); }
    set { SetAttribute(TerrainST.SunPathAngleAttribute, value); }
}

/// <summary>Terrain extent as a two-element [x, y] cell count.</summary>
public uint[] CellCount
{
    get { return GetAttribute<uint[]>(TerrainST.CellCountAttribute); }
    set { SetAttribute(TerrainST.CellCountAttribute, value); }
}

// Slope thresholds used when generating gradient flags (see GenerateCellFiles
// and SetShortCircuitSettings below).
public float GradFlagSlopeThreshold0
{
    get { return GetAttribute<float>(TerrainST.GradFlagSlopeThreshold0Attribute); }
    set { SetAttribute(TerrainST.GradFlagSlopeThreshold0Attribute, value); }
}

public float GradFlagSlopeThreshold1
{
    get { return GetAttribute<float>(TerrainST.GradFlagSlopeThreshold1Attribute); }
    set { SetAttribute(TerrainST.GradFlagSlopeThreshold1Attribute, value); }
}

public float GradFlagSlopeThreshold2
{
    get { return GetAttribute<float>(TerrainST.GradFlagSlopeThreshold2Attribute); }
    set { SetAttribute(TerrainST.GradFlagSlopeThreshold2Attribute, value); }
}

/// <summary>
/// Target file for the exported terrain configuration. Resolved relative to
/// the game's export directory when available; otherwise falls back to the
/// current directory.
/// </summary>
public Uri ConfigExportTarget
{
    get
    {
        var fn = GetAttribute<string>(TerrainST.ConfigFileTargetAttribute);
        var game = DomNode.GetRoot().As<Game.GameExtensions>();
        if (game != null)
            return new Uri(game.ExportDirectory, fn);
        return Utils.CurrentDirectoryAsUri();
    }
}

// ---- Child nodes ----

public DomNode BaseTexture
{
    get { return DomNode.GetChild(TerrainST.baseTextureChild); }
    set { DomNode.SetChild(TerrainST.baseTextureChild, value); }
}

public DomNode VegetationSpawn
{
    get { return DomNode.GetChild(TerrainST.VegetationSpawnChild); }
    set { DomNode.SetChild(TerrainST.VegetationSpawnChild, value); }
}

public IList<XLETerrainCoverage> CoverageLayers
{
    get
    {
        return GetChildList<XLETerrainCoverage>(TerrainST.coverageChild);
    }
}

/// <summary>True if a coverage layer with the given id is present.</summary>
public bool HasCoverageLayer(uint layerId)
{
    var layers = CoverageLayers;
    foreach (var l in layers)
        if (l.LayerId == layerId) return true;
    return false;
}
// Clamp helpers keep DomNode attributes inside the ranges the engine supports.
private static uint ClampNodeDimensions(uint input) { return input.Clamp(1u, 1024u); }
private static uint ClampCellTreeDepth(uint input) { return input.Clamp(1u, 16u); }
private static bool IsDerivedFrom(DomNode node, DomNodeType type) { return node.Type.Lineage.FirstOrDefault(t => t == type) != null; }

// True once the native terrain has been built from this node (see Reload/Unload).
private bool m_isLoaded = false;

/// <summary>
/// IHierarchical: only vegetation-spawn configs, terrain material
/// descriptors, and base-texture nodes can be parented under the terrain.
/// </summary>
public bool CanAddChild(object child)
{
    var domNode = child.As<DomNode>();
    if (domNode == null) return false;
    return IsDerivedFrom(domNode, Schema.vegetationSpawnConfigType.Type)
        || IsDerivedFrom(domNode, Schema.abstractTerrainMaterialDescType.Type)
        || IsDerivedFrom(domNode, Schema.terrainBaseTextureType.Type)
        ;
}

/// <summary>
/// IHierarchical: attaches a supported child. Material descriptors are
/// appended to the base texture's material list, creating the base-texture
/// node on demand. Returns false for unsupported types.
/// </summary>
public bool AddChild(object child)
{
    var domNode = child.As<DomNode>();
    if (domNode != null && IsDerivedFrom(domNode, Schema.vegetationSpawnConfigType.Type))
    {
        SetChild(Schema.terrainType.VegetationSpawnChild, domNode);
        return true;
    }
    if (domNode != null && IsDerivedFrom(domNode, Schema.terrainBaseTextureType.Type))
    {
        SetChild(Schema.terrainType.baseTextureChild, domNode);
        return true;
    }
    if (domNode != null && IsDerivedFrom(domNode, Schema.abstractTerrainMaterialDescType.Type))
    {
        if (BaseTexture == null)
            BaseTexture = new DomNode(Schema.terrainBaseTextureType.Type);
        BaseTexture.GetChildList(Schema.terrainBaseTextureType.materialChild).Add(domNode);
        return true;
    }
    return false;
}

/// <summary>
/// Returns the ids of all layers: the built-in layer id 1 (presumably the
/// heights layer -- confirm against the engine) plus every coverage layer.
/// </summary>
internal IEnumerable<uint> GetAllLayerIds()
{
    List<uint> result = new List<uint>();
    result.Add(1);
    foreach (var l in CoverageLayers)
        result.Add(l.LayerId);
    return result;
}
#region NativeObjectAdapter
// protected override void OnNodeSet() {}

/// <summary>Builds the native terrain the first time the node joins a
/// native document.</summary>
public void OnAddToDocument(XLEBridgeUtils.INativeDocumentAdapter doc)
{
    // This should be called after the node is fully created and the
    // terrain is ready to be loaded. We can call Reload() to build
    // the native terrain from here.
    if (!m_isLoaded) Reload();
}

/// <summary>Tears down the native terrain when the node leaves the document.</summary>
public void OnRemoveFromDocument(XLEBridgeUtils.INativeDocumentAdapter doc)
{
    // Note -- potential issues here if there are currently locks on the
    // terrain. The unload won't succeed in that case.
    if (m_isLoaded) Unload();
}

/// <summary>No native-side parenting work is required for the terrain.</summary>
public void OnSetParent(XLEBridgeUtils.INativeObjectAdapter newParent, int insertionPosition) { }
#endregion

#region IExportable
/// <summary>Location of the exported cached terrain data blob.</summary>
public Uri CacheExportTarget { get { return new Uri(CellsDirectory, "cached.dat"); } }
public string ExportCategory { get { return "Terrain"; } }

/// <summary>
/// Builds the two pending exports: the cached terrain data blob and the
/// terrain configuration file.
/// </summary>
public IEnumerable<PendingExport> BuildPendingExports()
{
    var sceneMan = this.GetSceneManager();
    var result = new List<PendingExport>();
    result.Add(new PendingExport(CacheExportTarget, sceneMan.ExportTerrainCachedData()));
    result.Add(
        new PendingExport(
            ConfigExportTarget,
            sceneMan.ExportTerrain(BuildEngineConfig(BuildDialogConfig()))));
    return result;
}
#endregion
#region Internal Low Level
/// <summary>
/// Releases the native terrain. Failures (e.g. outstanding locks on the
/// terrain) are reported to the user rather than rethrown; in that case
/// m_isLoaded remains true.
/// </summary>
internal void Unload()
{
    try
    {
        this.GetSceneManager().UnloadTerrain();
        m_isLoaded = false;
    }
    catch (Exception e)
    {
        ControlsLibrary.BasicControls.ExceptionReport.Show(e, "Unloading terrain");
    }
}
/// <summary>
/// (Re)builds the native terrain from the current DomNode configuration and
/// pushes the short-circuit (gradient flag) settings to the engine. Failures
/// are reported to the user rather than rethrown; m_isLoaded stays false in
/// that case.
/// </summary>
internal void Reload()
{
    try
    {
        this.GetSceneManager().ReloadTerrain(BuildEngineConfig());
        m_isLoaded = true;
        GUILayer.EditorInterfaceUtils.SetShortCircuitSettings(
            this.GetSceneManager(),
            HasEncodedGradientFlags, Spacing,
            GradFlagSlopeThreshold0, GradFlagSlopeThreshold1, GradFlagSlopeThreshold2);
    }
    catch (Exception e)
    {
        // Fix: previously reported "Unloading terrain" (copy-paste from
        // Unload()), which mislabelled errors raised while reloading.
        ControlsLibrary.BasicControls.ExceptionReport.Show(e, "Reloading terrain");
    }
}
/// <summary>
/// Converts a dialog-side configuration into the engine-side
/// GUILayer.TerrainConfig, copying the cell count from this node and adding
/// one engine coverage layer per enabled (and unique) coverage layer child.
/// </summary>
private GUILayer.TerrainConfig BuildEngineConfig(TerrainConfig.Config cfg)
{
    var result = new GUILayer.TerrainConfig(
        cfg.CellsDirectory,
        cfg.NodeDimensions, cfg.CellTreeDepth, cfg.Overlap,
        cfg.Spacing, (float)(cfg.SunPathAngle * Math.PI / 180.0f),  // degrees -> radians
        cfg.HasEncodedGradientFlags);
    result.CellCount = new GUILayer.VectorUInt2(CellCount[0], CellCount[1]);

    var layers = CoverageLayers;
    foreach (var l in layers)
    {
        if (!l.Enable) continue;

        // we should avoid adding multiple layers with the same id
        var id = l.LayerId;
        bool alreadyHere = false;
        for (uint c=0; c<result.CoverageLayerCount; ++c)
            if (result.GetCoverageLayer(c).Id == id)
            {
                alreadyHere = true;
                break;
            }
        if (alreadyHere) break;

        // Start from the engine defaults for this id, then override the
        // node dimensions, format and normalization mode from the child.
        var d = GUILayer.EditorInterfaceUtils.DefaultCoverageLayer(
            result, cfg.UberSurfaceDirectory, id);
        d.NodeDims = new GUILayer.VectorUInt2(
            (uint)(l.Resolution * cfg.NodeDimensions),
            (uint)(l.Resolution * cfg.NodeDimensions));
        if (l.Format != 0)
        {
            d.FormatCat = l.Format;
            d.FormatArrayCount = 1;
        }
        d.ShaderNormalizationMode = l.ShaderNormalizationMode;
        result.Add(d);
    }

    return result;
}
#endregion
#region Configure Steps
internal TerrainConfig.Config BuildDialogConfig()
{
var cfg = new TerrainConfig.Config();
cfg.NodeDimensions = NodeDimensions;
cfg.Overlap = Overlap;
cfg.Spacing = Spacing;
cfg.CellTreeDepth = CellTreeDepth;
cfg.UberSurfaceDirectory = UberSurfaceDirectory != null ? UberSurfaceDirectory.LocalPath : "";
cfg.CellsDirectory = CellsDirectory != null ? CellsDirectory.LocalPath : "";
cfg.HasEncodedGradientFlags = HasEncodedGradientFlags;
cfg.SunPathAngle = SunPathAngle;
cfg.SlopeThreshold0 = GradFlagSlopeThreshold0;
cfg.SlopeThreshold1 = GradFlagSlopeThreshold1;
cfg.SlopeThreshold2 = GradFlagSlopeThreshold2;
return cfg;
}
internal void CommitDialogConfig(TerrainConfig.Config cfg)
{
NodeDimensions = cfg.NodeDimensions;
Overlap = cfg.Overlap;
CellTreeDepth = cfg.CellTreeDepth;
Spacing = cfg.Spacing;
UberSurfaceDirectory = new Uri(cfg.UberSurfaceDirectory.TrimEnd('\\') + "\\");
CellsDirectory = new Uri(cfg.CellsDirectory.TrimEnd('\\') + "\\");
HasEncodedGradientFlags = cfg.HasEncodedGradientFlags;
SunPathAngle = cfg.SunPathAngle;
GradFlagSlopeThreshold0 = cfg.SlopeThreshold0;
GradFlagSlopeThreshold1 = cfg.SlopeThreshold1;
GradFlagSlopeThreshold2 = cfg.SlopeThreshold2;
}
internal GUILayer.TerrainConfig BuildEngineConfig()
{
return BuildEngineConfig(BuildDialogConfig());
}
        /// <summary>Reports an exception to the user via the standard report dialog.</summary>
        /// <param name="e">The exception to display.</param>
        /// <param name="whileMessage">Short description of the operation that failed.</param>
        static internal void Show(Exception e, string whileMessage)
        {
            ControlsLibrary.BasicControls.ExceptionReport.Show(e, whileMessage);
        }
        /// <summary>
        /// Applies a new terrain configuration: optionally imports a DEM or
        /// creates blank uber-surface data, regenerates missing uber-surface and
        /// cell files, then commits the new values and reloads the terrain.
        /// </summary>
        /// <param name="cfg">Configuration to apply. NodeDimensions and
        /// CellTreeDepth are clamped to valid ranges first.</param>
        /// <returns>True on success. On failure the error is reported and the
        /// terrain is deliberately left in an unloaded state.</returns>
        internal bool Reconfigure(TerrainConfig.Config cfg)
        {
            cfg.NodeDimensions = ClampNodeDimensions(cfg.NodeDimensions);
            cfg.CellTreeDepth = ClampCellTreeDepth(cfg.CellTreeDepth);
                // the terrain must be unloaded while rewriting the files on disk
            Unload();
            try
            {
                var newCellCount = new GUILayer.VectorUInt2(0, 0);
                using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
                {
                    bool rebuildCellFiles = false;
                        // if there is a source DEM file specified then we should
                        // attempt to build the starter uber surface.
                    if (cfg.Import == TerrainConfig.Config.ImportType.DEMFile
                        && cfg.SourceDEMFile != null && cfg.SourceDEMFile.Length > 0)
                    {
                        cfg.ImportOp.ExecuteForHeights(cfg.UberSurfaceDirectory, progress);
                        rebuildCellFiles = true;
                    }
                    else if (cfg.Import == TerrainConfig.Config.ImportType.NewBlankTerrain
                        && cfg.NewCellCountX != 0 && cfg.NewCellCountY != 0)
                    {
                        GUILayer.EditorInterfaceUtils.GenerateBlankUberSurface(
                            cfg.UberSurfaceDirectory, cfg.NewCellCountX, cfg.NewCellCountY,
                            cfg.NodeDimensions, cfg.CellTreeDepth,
                            progress);
                        rebuildCellFiles = true;
                    }
                        // the real cell count is derived from the uber surface on disk
                    var engineCfg = BuildEngineConfig(cfg);
                    engineCfg.InitCellCountFromUberSurface(cfg.UberSurfaceDirectory);
                    newCellCount = engineCfg.CellCount;
                        // fill in the cells directory with starter cells (if they don't already exist)
                        // (and build empty uber surface files for any that are missing)
                    GUILayer.EditorInterfaceUtils.GenerateMissingUberSurfaceFiles(
                        engineCfg, cfg.UberSurfaceDirectory, progress);
                    GUILayer.EditorInterfaceUtils.GenerateCellFiles(
                        engineCfg, cfg.UberSurfaceDirectory, rebuildCellFiles,
                        cfg.SlopeThreshold0, cfg.SlopeThreshold1, cfg.SlopeThreshold2,
                        progress);
                }
                // if the above completed without throwing an exception, we can commit the values
                CommitDialogConfig(cfg);
                CellCount = new uint[2] { newCellCount.X, newCellCount.Y };
            }
            catch (Exception e)
            {
                Show(e, "terrain configure operation");
                return false; // this will prevent reload on exception. Terrain will remain in an unloaded state
            }
            Reload();
            return true;
        }
internal bool Reconfigure() { return Reconfigure(BuildDialogConfig()); }
internal bool DoModalConfigure()
{
// open the configuration dialog
using (var dlg = new TerrainConfig())
{
dlg.Value = BuildDialogConfig();
var result = dlg.ShowDialog();
if (result == DialogResult.OK)
return Reconfigure(dlg.Value);
}
return false;
}
#endregion
#region Commands
        /// <summary>
        /// Regenerates the shadow and ambient-occlusion uber surfaces. The
        /// terrain is unloaded during generation and reloaded afterwards, even
        /// if generation throws (the error is reported to the user first).
        /// </summary>
        void DoGenerateShadows()
        {
            Unload();
            try
            {
                using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
                {
                    GUILayer.EditorInterfaceUtils.GenerateShadowsSurface(
                        BuildEngineConfig(), UberSurfaceDirectory.LocalPath,
                        progress);
                }
                    // AO generation uses a separate progress dialog so each pass
                    // gets its own progress reporting
                using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
                {
                    GUILayer.EditorInterfaceUtils.GenerateAmbientOcclusionSurface(
                        BuildEngineConfig(), UberSurfaceDirectory.LocalPath,
                        progress);
                }
            }
            catch (Exception e)
            {
                Show(e, "shadow generation operation");
            }
            Reload();
        }
        /// <summary>Commits pending in-memory terrain modifications to disk.</summary>
        void DoFlushToDisk()
        {
            using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
            {
                GUILayer.EditorInterfaceUtils.FlushTerrainToDisk(this.GetSceneManager(), progress);
            }
        }
        /// <summary>
        /// Forces a rebuild of all cell files from the uber surface, using the
        /// current gradient-flag slope thresholds. Unloads first and reloads
        /// afterwards (even on error, after reporting it).
        /// </summary>
        void DoRebuildCellFiles()
        {
            Unload();
            try
            {
                using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
                {
                    GUILayer.EditorInterfaceUtils.GenerateCellFiles(
                        BuildEngineConfig(), UberSurfaceDirectory.LocalPath, true,
                        GradFlagSlopeThreshold0, GradFlagSlopeThreshold1, GradFlagSlopeThreshold2, progress);
                }
            }
            catch (Exception e)
            {
                Show(e, "rebuilding cell files");
            }
            Reload();
        }
        /// <summary>
        /// Prompts for a destination file and exports the heights layer
        /// (layer id 1) to a TIFF file. The terrain is unloaded for the export
        /// and reloaded on success; errors are reported to the user.
        /// </summary>
        internal void DoExportHeights()
        {
            var fileDlg = new SaveFileDialog();
            fileDlg.Filter = "Tiff files|*.tiff;*.tif";
            if (fileDlg.ShowDialog() == DialogResult.OK)
            {
                try
                {
                    Unload();
                    using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
                    {
                            // "1" is the heights layer id (see XLETerrainCoverage.LayerName)
                        GUILayer.EditorInterfaceUtils.ExecuteTerrainExport(
                            fileDlg.FileName,
                            BuildEngineConfig(),
                            UberSurfaceDirectory.LocalPath,
                            1, progress);
                    }
                    Reload();
                }
                catch (Exception e)
                {
                    Show(e, "Terrain export to tiff");
                }
            }
        }
#region ICommandClient Members
bool ICommandClient.CanDoCommand(object commandTag)
{
if (commandTag is Command)
{
switch ((Command)commandTag)
{
case Command.CreateBaseTexture:
return BaseTexture == null;
case Command.CreateVegetationSpawn:
return VegetationSpawn == null;
case Command.Configure:
case Command.GenerateShadows:
case Command.FlushToDisk:
case Command.RebuildCellFiles:
case Command.Reload:
case Command.AddGenericCoverage:
case Command.ExportToTiff:
return true;
case Command.AddShadows:
case Command.AddAO:
case Command.AddBaseMaterialCoverage:
case Command.AddDecorationCoverage:
return !HasCoverageLayer(AssociatedLayerId((Command)commandTag));
case Command.Unload:
return m_isLoaded;
}
}
return false;
}
uint AssociatedLayerId(Command cmd)
{
switch (cmd)
{
case Command.AddShadows: return 2;
case Command.AddAO: return 3;
case Command.AddBaseMaterialCoverage: return 1000;
case Command.AddDecorationCoverage: return 1001;
default: return 0;
}
}
void ICommandClient.DoCommand(object commandTag)
{
if (!(commandTag is Command)) return;
switch ((Command)commandTag)
{
case Command.CreateBaseTexture:
{
ApplicationUtil.Insert(
DomNode.GetRoot(), this, new DomNode(Schema.terrainBaseTextureType.Type),
"Create Terrain Texturing", null);
break;
}
case Command.CreateVegetationSpawn:
{
ApplicationUtil.Insert(
DomNode.GetRoot(), this, new DomNode(Schema.vegetationSpawnConfigType.Type),
"Create Vegetation Config", null);
break;
}
case Command.Configure:
{
DoModalConfigure();
break;
}
case Command.GenerateShadows:
{
DoGenerateShadows();
break;
}
case Command.FlushToDisk:
{
DoFlushToDisk();
break;
}
case Command.RebuildCellFiles:
{
DoRebuildCellFiles();
break;
}
case Command.Reload:
{
Reload();
break;
}
case Command.Unload:
{
Unload();
break;
}
case Command.ExportToTiff:
DoExportHeights();
break;
case Command.AddShadows:
case Command.AddAO:
case Command.AddBaseMaterialCoverage:
case Command.AddDecorationCoverage:
{
var layerId = AssociatedLayerId((Command)commandTag);
if (!HasCoverageLayer(layerId))
{
var layer = XLETerrainCoverage.CreateWithConfigure(this, layerId).As<XLETerrainCoverage>();
if (layer != null)
{
if (!HasCoverageLayer(layer.LayerId))
{
CoverageLayers.Add(layer);
Reconfigure();
}
else
{
MessageBox.Show(
"Layer id conflicts with existing id. You can't have 2 layers with the same id. Try using a unique id", "Error adding layer",
MessageBoxButtons.OK,
MessageBoxIcon.Error);
}
}
}
break;
}
case Command.AddGenericCoverage:
{
var layerId = 1003u;
while (HasCoverageLayer(layerId)) ++layerId;
var layer = XLETerrainCoverage.CreateWithConfigure(this, layerId).As<XLETerrainCoverage>();
if (layer != null)
{
if (!HasCoverageLayer(layer.LayerId))
{
CoverageLayers.Add(layer);
Reconfigure();
}
else
{
MessageBox.Show(
"Layer id conflicts with existing id. You can't have 2 layers with the same id. Try using a unique id", "Error adding layer",
MessageBoxButtons.OK,
MessageBoxIcon.Error);
}
}
break;
}
}
}
        // No dynamic command state (check marks / captions) needs updating.
        void ICommandClient.UpdateCommand(object commandTag, CommandState commandState)
        { }
#endregion
private enum Command
{
[Description("Add Texturing Settings")] CreateBaseTexture,
[Description("Add Decoration Settings")] CreateVegetationSpawn,
[Description("Configure Terrain...")] Configure,
[Description("Generate Shadows")] GenerateShadows,
[Description("Commit to disk")] FlushToDisk,
[Description("Rebuild cell files")] RebuildCellFiles,
[Description("Unload terrain")] Unload,
[Description("Reload terrain")] Reload,
[Description("Export to TIFF")] ExportToTiff,
[Description("Add Shadows")] AddShadows,
[Description("Add Ambient Occlusion")] AddAO,
[Description("Add Base Material Coverage")] AddBaseMaterialCoverage,
[Description("Add Decoration Coverage")] AddDecorationCoverage,
[Description("Add Generic Converage")] AddGenericCoverage
}
IEnumerable<object> IContextMenuCommandProvider.GetCommands(object context, object target)
{
foreach (Command command in Enum.GetValues(typeof(Command)))
{
yield return command;
}
}
#endregion
}
    /// <summary>
    /// DomNode adapter for one terrain coverage layer (shadows, AO, base
    /// material, decoration or a generic layer, identified by LayerId).
    /// Exposes the layer's schema attributes and its configure / export /
    /// rebuild context-menu commands.
    /// </summary>
    class XLETerrainCoverage : DomNodeAdapter, IListable, ICommandClient, IContextMenuCommandProvider
    {
        public void GetInfo(ItemInfo info)
        {
            info.ImageIndex = Util.GetTypeImageIndex(DomNode.Type, info.GetImageList());
            info.Label = "Coverage: " + LayerName;
        }

        public static DomNode Create() { return new DomNode(Schema.terrainCoverageLayer.Type); }

            // Creates a new coverage node, assigns the requested layer id and
            // immediately runs the configure dialog. Returns null on cancel.
        public static DomNode CreateWithConfigure(XLETerrainGob terrain, uint id)
        {
            var result = new DomNode(Schema.terrainCoverageLayer.Type);
            var adapter = result.As<XLETerrainCoverage>();
            if (adapter != null)
            {
                adapter.LayerId = id;
                if (adapter.DoModalConfigure(terrain))
                    return result;
            }
            return null;
        }

        public uint LayerId
        {
            get { return GetAttribute<uint>(Schema.terrainCoverageLayer.IdAttribute); }
            set { SetAttribute(Schema.terrainCoverageLayer.IdAttribute, value); }
        }

            // Friendly label for the well-known layer ids; other ids display numerically.
        public string LayerName
        {
            get
            {
                switch (LayerId)
                {
                case 0:
                case 1: return "Heights";
                case 2: return "Shadows";
                case 3: return "AO";
                case 1000: return "Base Material";
                case 1001: return "Decoration";
                default: return LayerId.ToString();
                }
            }
        }

        public float Resolution
        {
            get { return GetAttribute<float>(Schema.terrainCoverageLayer.ResolutionAttribute); }
            set { SetAttribute(Schema.terrainCoverageLayer.ResolutionAttribute, value); }
        }

        public Uri SourceFile
        {
            get { return GetAttribute<Uri>(Schema.terrainCoverageLayer.SourceFileAttribute); }
            set { SetAttribute(Schema.terrainCoverageLayer.SourceFileAttribute, value); }
        }

        public uint Overlap
        {
            get { return GetAttribute<uint>(Schema.terrainCoverageLayer.OverlapAttribute); }
            set { SetAttribute(Schema.terrainCoverageLayer.OverlapAttribute, value); }
        }

        public bool Enable
        {
            get { return GetAttribute<bool>(Schema.terrainCoverageLayer.EnableAttribute); }
            set { SetAttribute(Schema.terrainCoverageLayer.EnableAttribute, value); }
        }

        public uint Format
        {
            get { return GetAttribute<uint>(Schema.terrainCoverageLayer.FormatAttribute); }
            set { SetAttribute(Schema.terrainCoverageLayer.FormatAttribute, value); }
        }

        public uint ShaderNormalizationMode
        {
            get { return GetAttribute<uint>(Schema.terrainCoverageLayer.ShaderNormalizationModeAttribute); }
            set { SetAttribute(Schema.terrainCoverageLayer.ShaderNormalizationModeAttribute, value); }
        }

            // The owning terrain (this node is a child of the terrain node).
        public XLETerrainGob Parent { get { return DomNode.Parent.As<XLETerrainGob>(); } }

        #region Configure Steps
            // Builds the dialog configuration from this layer's attributes,
            // pulling node dimensions / tree depth from the owning terrain.
        internal TerrainCoverageConfig.Config BuildDialogConfig(XLETerrainGob terrain)
        {
            var cfg = new TerrainCoverageConfig.Config();
            cfg.NodeDimensions = terrain.NodeDimensions;
            cfg.CellTreeDepth = terrain.CellTreeDepth;
            cfg.Resolution = Resolution;
            cfg.SourceFile = SourceFile != null ? SourceFile.LocalPath : "";
            cfg.Enable = Enable;
            cfg.Id = LayerId;
            cfg.ShaderNormalizationMode = ShaderNormalizationMode;
            return cfg;
        }

            // Writes the dialog configuration back onto this layer's attributes.
        internal void CommitDialogConfig(TerrainCoverageConfig.Config cfg)
        {
            Resolution = cfg.Resolution;
            SourceFile = String.IsNullOrEmpty(cfg.SourceFile) ? null : new Uri(cfg.SourceFile);
            Enable = cfg.Enable;
            LayerId = cfg.Id;
            ShaderNormalizationMode = cfg.ShaderNormalizationMode;
        }

            // Applies a new layer configuration, optionally importing coverage
            // data from a DEM file into the terrain's uber surface. Returns
            // false (after reporting) if the import fails.
        internal bool Reconfigure(TerrainCoverageConfig.Config cfg, XLETerrainGob terrain)
        {
            if (terrain != null)
                terrain.Unload(); // (have to unload to write to ubersurface)
            try
            {
                // If an import or create operation was requested, we
                // must perform those now. Note that this might require
                // some format changes.
                if (cfg.Import == TerrainCoverageConfig.Config.ImportType.DEMFile
                    && cfg.SourceFile != null && cfg.SourceFile.Length > 0
                    && terrain!=null)
                {
                    using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
                    {
                        cfg.ImportOp.Execute(
                            terrain.UberSurfaceDirectory.LocalPath,
                            cfg.Id, cfg.ImportOp.ImportCoverageFormat,
                            progress);
                    }
                    Format = cfg.ImportOp.ImportCoverageFormat;
                }
                else if (cfg.Import == TerrainCoverageConfig.Config.ImportType.NewBlankTerrain)
                {
                    // todo -- create new blank coverage with the given format
                }
            }
            catch (Exception e)
            {
                XLETerrainGob.Show(e, "Terrain import operation");
                return false;
            }
            CommitDialogConfig(cfg);
            if (terrain!=null)
                terrain.Reconfigure();
            return true;
        }

            // Opens the modal configure dialog for this layer; applies on OK.
        internal bool DoModalConfigure(XLETerrainGob terrain)
        {
                // open the configuration dialog
            using (var dlg = new TerrainCoverageConfig())
            {
                dlg.Value = BuildDialogConfig(terrain);
                var result = dlg.ShowDialog();
                if (result == System.Windows.Forms.DialogResult.OK)
                {
                    return Reconfigure(dlg.Value, terrain);
                }
            }
            return false;
        }
        #endregion

            // Prompts for a file name and exports this layer to a TIFF file.
            // The terrain is unloaded for the export and reloaded on success.
        internal void DoExport(XLETerrainGob terrain)
        {
            if (terrain == null) return;
            var fileDlg = new SaveFileDialog();
            fileDlg.Filter = "Tiff files|*.tiff;*.tif";
            if (fileDlg.ShowDialog() == DialogResult.OK)
            {
                try
                {
                    terrain.Unload();
                    using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
                    {
                        GUILayer.EditorInterfaceUtils.ExecuteTerrainExport(
                            fileDlg.FileName,
                            terrain.BuildEngineConfig(),
                            terrain.UberSurfaceDirectory.LocalPath,
                            LayerId, progress);
                    }
                    terrain.Reload();
                }
                catch (Exception e)
                {
                    XLETerrainGob.Show(e, "Terrain export to tiff");
                }
            }
        }

            // Rebuilds cell files for this layer only.
            // NOTE(review): this GenerateCellFiles overload receives LayerId where
            // XLETerrainGob.DoRebuildCellFiles passes three slope thresholds --
            // presumably a per-layer overload; confirm against EditorInterfaceUtils.
        void DoRebuildCellFiles(XLETerrainGob terrain)
        {
            if (terrain == null) return;
            terrain.Unload();
            try
            {
                using (var progress = new ControlsLibrary.ProgressDialog.ProgressInterface())
                {
                    GUILayer.EditorInterfaceUtils.GenerateCellFiles(
                        terrain.BuildEngineConfig(), terrain.UberSurfaceDirectory.LocalPath, true,
                        LayerId, progress);
                }
            }
            catch (Exception e)
            {
                XLETerrainGob.Show(e, "Rebuilding cell files");
            }
            terrain.Reload();
        }

        #region ICommandClient Members
            // All three layer commands are always enabled for Command tags.
        bool ICommandClient.CanDoCommand(object commandTag)
        {
            if (commandTag is Command)
            {
                switch ((Command)commandTag)
                {
                case Command.Configure:
                case Command.Export:
                case Command.RebuildLayerCellFiles:
                    return true;
                }
            }
            return false;
        }

        void ICommandClient.DoCommand(object commandTag)
        {
            if (!(commandTag is Command)) return;
            switch ((Command)commandTag)
            {
            case Command.Configure:
                DoModalConfigure(Parent);
                break;
            case Command.Export:
                DoExport(Parent);
                break;
            case Command.RebuildLayerCellFiles:
                DoRebuildCellFiles(Parent);
                break;
            }
        }

            // No dynamic command state to update.
        void ICommandClient.UpdateCommand(object commandTag, CommandState commandState)
        { }
        #endregion

        private enum Command
        {
            [Description("Configure...")] Configure,
            [Description("Export...")] Export,
            [Description("Rebuild layer cell files")] RebuildLayerCellFiles
        }

            // Every command is offered on the context menu; CanDoCommand gates them.
        IEnumerable<object> IContextMenuCommandProvider.GetCommands(object context, object target)
        {
            foreach (Command command in Enum.GetValues(typeof(Command)))
            {
                yield return command;
            }
        }
    }
}
| {'content_hash': '34b33052a8be6a2ed19669966a63c943', 'timestamp': '', 'source': 'github', 'line_count': 999, 'max_line_length': 165, 'avg_line_length': 36.74274274274274, 'alnum_prop': 0.5225848635100528, 'repo_name': 'xlgames-inc/XLE', 'id': 'ed70d8e5ff1a2579f62fdbf6700921556249d5a3', 'size': '36708', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Foreign/SonyLE/LevelEditorXLE/Terrain/XLETerrainGob.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Assembly', 'bytes': '3201604'}, {'name': 'Awk', 'bytes': '3962'}, {'name': 'Batchfile', 'bytes': '3360'}, {'name': 'C', 'bytes': '10674877'}, {'name': 'C#', 'bytes': '2848942'}, {'name': 'C++', 'bytes': '19256138'}, {'name': 'CMake', 'bytes': '61254'}, {'name': 'CSS', 'bytes': '27391'}, {'name': 'DIGITAL Command Language', 'bytes': '35816'}, {'name': 'Fortran', 'bytes': '1454013'}, {'name': 'GAP', 'bytes': '20112'}, {'name': 'GLSL', 'bytes': '39352'}, {'name': 'GSC', 'bytes': '54863'}, {'name': 'Groovy', 'bytes': '11823'}, {'name': 'HLSL', 'bytes': '426796'}, {'name': 'HTML', 'bytes': '530940'}, {'name': 'JavaScript', 'bytes': '15993'}, {'name': 'M4', 'bytes': '20151'}, {'name': 'Makefile', 'bytes': '266194'}, {'name': 'Perl', 'bytes': '12798'}, {'name': 'Python', 'bytes': '187255'}, {'name': 'Rich Text Format', 'bytes': '46532'}, {'name': 'Roff', 'bytes': '7542'}, {'name': 'Shell', 'bytes': '848671'}, {'name': 'sed', 'bytes': '236'}]} |
<?php namespace OneAuth\OAuth2\Token;
use OneAuth\OAuth2\Token as OAuth2_Token,
OneAuth\OAuth2\Exception;
class Authorize extends OAuth2_Token
{
	/**
	 * @var string  authorization code issued by the provider
	 */
	protected $code;

	/**
	 * @var string  redirect URI the authorization code was bound to
	 */
	protected $redirect_uri;

	/**
	 * Validates the required options and stores the code / redirect URI.
	 *
	 * @param   array  token options; 'code' and 'redirect_uri' are required
	 * @return  void
	 */
	public function __construct(array $options)
	{
		if ( ! isset($options['code']))
		{
			throw new Exception(array('message' => 'Required option not passed: code'));
		}

		if ( ! isset($options['redirect_uri']))
		{
			throw new Exception(array('message' => 'Required option not passed: redirect_uri'));
		}

		$this->code = $options['code'];
		$this->redirect_uri = $options['redirect_uri'];
	}

	/**
	 * Returns the authorization code when the token is cast to a string.
	 *
	 * @return  string
	 */
	public function __toString()
	{
		return (string) $this->code;
	}

} // End Authorize
| {'content_hash': '51888f841b7334f7a9902526e9edc33b', 'timestamp': '', 'source': 'github', 'line_count': 52, 'max_line_length': 96, 'avg_line_length': 18.942307692307693, 'alnum_prop': 0.5908629441624366, 'repo_name': 'BrokenLegs/GreenCircleOnline', 'id': '6a484f5985fcfaae9501c58ab320ef58b8d60236', 'size': '1155', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'core/bundles/oneauth/libraries/oauth2/token/authorize.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '207523'}, {'name': 'PHP', 'bytes': '1212324'}]} |
package com.intellij.psi.impl.source.tree.java;
import com.intellij.psi.JavaElementVisitor;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.PsiExpression;
import com.intellij.psi.PsiType;
import com.intellij.psi.impl.source.Constants;
import com.intellij.psi.impl.source.tree.JavaElementType;
import org.jetbrains.annotations.NotNull;
/**
 * PSI tree node representing an empty (placeholder) expression.
 */
public class PsiEmptyExpressionImpl extends ExpressionPsiElement implements PsiExpression{
  public PsiEmptyExpressionImpl() {
    super(JavaElementType.EMPTY_EXPRESSION);
  }

  @Override
  public PsiType getType() {
    // An empty expression has no type.
    return null;
  }

  @Override
  public void accept(@NotNull PsiElementVisitor visitor) {
    // Dispatch to the Java-specific callback when available, otherwise fall
    // back to the generic element visit.
    if (!(visitor instanceof JavaElementVisitor)) {
      visitor.visitElement(this);
      return;
    }
    ((JavaElementVisitor)visitor).visitExpression(this);
  }

  @Override
  public String toString() {
    return "PsiExpression(empty)";
  }
}
| {'content_hash': 'b18202322acae12f20eea643ee3e1b30', 'timestamp': '', 'source': 'github', 'line_count': 36, 'max_line_length': 90, 'avg_line_length': 25.555555555555557, 'alnum_prop': 0.7576086956521739, 'repo_name': 'mdanielwork/intellij-community', 'id': '1c2153ac35f9805ff68b70bbbe1a76ee9360fd2f', 'size': '1520', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'java/java-psi-impl/src/com/intellij/psi/impl/source/tree/java/PsiEmptyExpressionImpl.java', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {'content_hash': '9b8be2feea51fe000ff1d7e13fe3fe8b', 'timestamp': '', 'source': 'github', 'line_count': 13, 'max_line_length': 31, 'avg_line_length': 9.692307692307692, 'alnum_prop': 0.7063492063492064, 'repo_name': 'mdoering/backbone', 'id': '7440cd5355a5e38632c3ab82da91a7a278710c0f', 'size': '175', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'life/Plantae/Magnoliophyta/Magnoliopsida/Saxifragales/Saxifragaceae/Astilbe/Astilbe longicarpa/README.md', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
import {
PLAY, PLAYING, PLAY_FAILED, PAUSE, PAUSED, SCRUB_TO, SEEK_TO,
FADE_OUT_AND_PAUSE, PLAY_AND_FADE_IN,
CHANGE_VOLUME_FACTOR,
PREBUFFER, PREBUFFERED,
BUFFER_UNDERRUN, BUFFER_UNDERRUN_CONTINUE, WAITING, SEEKING, SEEKED,
META_DATA_LOADED, PROGRESS, TIME_UPDATE, ENDED,
HAS_NOT_BEEN_PLAYING_FOR_A_MOMENT,
USER_INTERACTION, USER_IDLE,
CONTROLS_ENTERED, CONTROLS_LEFT,
FOCUS_ENTERED_CONTROLS, FOCUS_LEFT_CONTROLS,
CONTROLS_HIDDEN,
SHOW_INFO_BOX_DURING_PLAYBACK, HIDE_INFO_BOX_DURING_PLAYBACK, TOGGLE_INFO_BOX_DURING_PLAYBACK,
SAVE_MEDIA_ELEMENT_ID, DISCARD_MEDIA_ELEMENT_ID
} from './actions';
import {HOTKEY_TAB} from 'hotkeys/actions';
import {
PAGE_WILL_ACTIVATE,
PAGE_WILL_DEACTIVATE
} from 'pages/actions';
// Factory building the media reducer for one media scope. Actions carrying a
// `meta.mediaScope` that differs from this reducer's scope are ignored, so
// multiple independent media players can coexist in a single store.
export default function({scope = 'default'} = {}) {
  return function reducer(state = {}, action) {
    // Drop actions addressed to a different media scope.
    if (action.meta &&
        action.meta.mediaScope &&
        action.meta.mediaScope !== scope) {
      return state;
    }

    switch (action.type) {
    // Reset per-page playback flags whenever a page becomes active/inactive.
    case PAGE_WILL_ACTIVATE:
      return {
        ...state,
        hasPlayed: false,
        unplayed: true,
        infoBoxHiddenDuringPlayback: undefined,
        userIsIdle: false
      };
    case PAGE_WILL_DEACTIVATE:
      return {
        ...state,
        shouldPrebuffer: false
      };
    // Playback intent / state transitions.
    case PLAY:
      return {
        ...state,
        shouldPlay: true,
        playFailed: false,
        hasBeenPlayingJustNow: true,
        unplayed: false,
        fadeDuration: null,
        isLoading: true
      };
    case PLAYING:
      return {
        ...state,
        shouldPlay: true,
        isPlaying: true
      };
    case PLAY_FAILED:
      return {
        ...state,
        shouldPlay: false,
        playFailed: true,
        hasBeenPlayingJustNow: false,
        unplayed: true,
        fadeDuration: null,
        isLoading: false
      };
    case PLAY_AND_FADE_IN:
      return {
        ...state,
        shouldPlay: true,
        hasBeenPlayingJustNow: true,
        fadeDuration: action.payload.fadeDuration,
        isLoading: true
      };
    case PAUSE:
      return {
        ...state,
        shouldPlay: false,
        fadeDuration: null,
        isLoading: false
      };
    case PAUSED:
      // A pause caused by a buffer underrun is involuntary: keep shouldPlay
      // set so playback resumes once buffering catches up.
      if (state.bufferUnderrun) {
        return {
          ...state,
          isPlaying: false,
          hasPlayed: true
        };
      }

      return {
        ...state,
        shouldPlay: false,
        isPlaying: false,
        fadeDuration: null,
        isLoading: false
      };
    case FADE_OUT_AND_PAUSE:
      return {
        ...state,
        shouldPlay: false,
        fadeDuration: action.payload.fadeDuration,
        isLoading: false
      };
    case CHANGE_VOLUME_FACTOR:
      return {
        ...state,
        volumeFactor: action.payload.volumeFactor,
        volumeFactorFadeDuration: action.payload.fadeDuration
      };
    // Buffering and seeking.
    case PREBUFFER:
      return {
        ...state,
        shouldPrebuffer: true
      };
    case PREBUFFERED:
      return {
        ...state,
        shouldPrebuffer: false
      };
    case WAITING:
      return {
        ...state,
        isLoading: true
      };
    case BUFFER_UNDERRUN:
      return {
        ...state,
        bufferUnderrun: true
      };
    case BUFFER_UNDERRUN_CONTINUE:
      return {
        ...state,
        bufferUnderrun: false
      };
    case SCRUB_TO:
      return {
        ...state,
        scrubbingAt: action.payload.time
      };
    case SEEK_TO:
      return {
        ...state,
        shouldSeekTo: action.payload.time
      };
    case SEEKING:
      return {
        ...state,
        isLoading: true
      };
    case SEEKED:
      return {
        ...state,
        scrubbingAt: undefined,
        isLoading: false
      };
    // Media element progress reporting.
    case META_DATA_LOADED:
      return {
        ...state,
        currentTime: action.payload.currentTime,
        duration: action.payload.duration
      };
    case PROGRESS:
      return {
        ...state,
        bufferedEnd: action.payload.bufferedEnd
      };
    case TIME_UPDATE:
      return {
        ...state,
        currentTime: action.payload.currentTime,
        duration: action.payload.duration,
        isLoading: false
      };
    case ENDED:
      return {
        ...state,
        shouldPlay: false,
        isPlaying: false
      };
    case HAS_NOT_BEEN_PLAYING_FOR_A_MOMENT:
      return {
        ...state,
        hasBeenPlayingJustNow: false
      };
    // User activity and controls visibility.
    case HOTKEY_TAB:
    case USER_INTERACTION:
      return {
        ...state,
        userIsIdle: false,
        controlsHidden: false
      };
    case USER_IDLE:
      return {
        ...state,
        userIsIdle: true,
      };
    case CONTROLS_ENTERED:
      return {
        ...state,
        userHoveringControls: true
      };
    case CONTROLS_LEFT:
      return {
        ...state,
        userHoveringControls: false
      };
    case FOCUS_ENTERED_CONTROLS:
      return {
        ...state,
        focusInsideControls: true
      };
    case FOCUS_LEFT_CONTROLS:
      return {
        ...state,
        focusInsideControls: false
      };
    case CONTROLS_HIDDEN:
      return {
        ...state,
        controlsHidden: true,
        infoBoxHiddenDuringPlayback: true
      };
    case HIDE_INFO_BOX_DURING_PLAYBACK:
      return {
        ...state,
        infoBoxHiddenDuringPlayback: true
      };
    case SHOW_INFO_BOX_DURING_PLAYBACK:
      return {
        ...state,
        infoBoxHiddenDuringPlayback: false
      };
    case TOGGLE_INFO_BOX_DURING_PLAYBACK:
      return {
        ...state,
        infoBoxHiddenDuringPlayback: !state.infoBoxHiddenDuringPlayback
      };
    // Bookkeeping for the underlying media DOM element.
    case SAVE_MEDIA_ELEMENT_ID:
      return {
        ...state,
        mediaElementId: action.payload.id
      };
    case DISCARD_MEDIA_ELEMENT_ID:
      return {
        ...state,
        mediaElementId: null
      };
    default:
      return state;
    }
  };
}
| {'content_hash': 'b8b11a6ba9f7853da0accb6ce02f2d0c', 'timestamp': '', 'source': 'github', 'line_count': 272, 'max_line_length': 96, 'avg_line_length': 21.78676470588235, 'alnum_prop': 0.5507931150860614, 'repo_name': 'tf/pageflow', 'id': '577f3148474523e25e8090a9b428bd8e60ef84e1', 'size': '5926', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'entry_types/paged/packages/pageflow-paged-react/src/media/createReducer.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '106856'}, {'name': 'EJS', 'bytes': '25300'}, {'name': 'HTML', 'bytes': '54060'}, {'name': 'JavaScript', 'bytes': '2693120'}, {'name': 'Procfile', 'bytes': '128'}, {'name': 'Ruby', 'bytes': '1837398'}, {'name': 'SCSS', 'bytes': '302174'}, {'name': 'Shell', 'bytes': '1438'}]} |
import os.path
import requests
import time
from bs4 import BeautifulSoup
from geotext import GeoText as gt
from string import punctuation
from collections import Counter
import re
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
threats = ['loss', 'fragmentation', 'hunting', 'poaching', 'fishing', 'overfishing', 'environmental', 'environment', 'invasive', 'disease', 'pet', 'pollution']
conservation = ['cites', 'protection law', 'captive breeding', 'protected', 'endangered species act', 'wwf', 'wcs']
conservationString = ''
threatString = ''
def findConservation(string, keywords=None):
    """Return the conservation keywords found in ``string``.

    The text is lower-cased and literal "<p>" tags are removed before
    matching.  Results are returned in keyword order.

    ``keywords`` defaults to the module-level ``conservation`` list
    (preserving the original behaviour); pass an explicit iterable to
    search for a custom keyword set.
    """
    if keywords is None:
        keywords = conservation
    consFound = []
    string = string.lower()
    string = string.replace("<p>", "")
    for word in keywords:
        if word in string:
            consFound.append(word)
    return consFound
def findThreats(string, keywords=None):
    """Return the threat keywords found in ``string``.

    The text is lower-cased and literal "<p>" tags are removed before
    matching.  Results are returned in keyword order.  The unused
    ``string.index(word)`` computation from the original was removed.

    ``keywords`` defaults to the module-level ``threats`` list (preserving
    the original behaviour); pass an explicit iterable to search for a
    custom keyword set.
    """
    if keywords is None:
        keywords = threats
    threatsFound = []
    string = string.lower()
    string = string.replace("<p>", "")
    for word in keywords:
        if word in string:
            threatsFound.append(word)
    return threatsFound
def parseThrough(string):
    """Strip commas and return the text between the first ``<p>`` and
    ``</p>`` tags, if present.

    Missing tags are tolerated: without ``<p>`` the whole (comma-free)
    string is used; without ``</p>`` everything after ``<p>`` is returned.
    """
    cleaned = string.replace(',', '')
    open_tag = '<p>'
    close_tag = '</p>'
    if open_tag in cleaned:
        cleaned = cleaned.split(open_tag)[1]
    if close_tag in cleaned:
        cleaned = cleaned.split(close_tag)[0]
    return cleaned
def urlNeeded():
    """Scrape each species-page URL listed in test.txt and append one CSV row
    per page to output.txt.

    Relies on pages whose <p> elements use the CSS classes Threats,
    Conservation, Range and Status (ARKive-style markup -- TODO confirm).
    Accumulates all threat/conservation paragraph text into the module-level
    threatString / conservationString globals as a side effect.
    """
    global threats
    global conservationString
    global threatString
    allThreats = []
    global conservation
    allCons = []
    # output CSV; commas inside scraped values are stripped/replaced below to
    # keep the column layout intact
    f = open('output.txt', "w")
    f.write('Scientific Name, Nickname, Common Name, Kingdom, Phylum, Class, Order, Family, Genus, Size, Threats, Conservation, Threat Keywords, Conservation Keywords, status, countries, country_count' + '\n')
    with open('test.txt', "rb") as fd:
        for line in fd:
            line = line.lstrip().rstrip()
            url = line
            r = requests.get(url)
            soup = BeautifulSoup(r.text.encode('utf-8'), 'html.parser')
            # first <td> is taken as the scientific name cell -- TODO confirm
            newName = soup.find('td').text
            newName = newName.lstrip().rstrip()
            newName = str(newName)
            newName = newName.replace(',',';')
            f.write(newName + ',')
            # page heading: "Common name (Scientific name)"
            for t in soup.findAll('h1'):
                name = t.text
            s = '('
            if s in name:
                commonName = name.split(s)[0]
                scienceName = name.split(s)[1]
                scienceName = scienceName.replace(')','')
                f.write(scienceName + ',')
                print scienceName
            f.write(name + ',')
            # left-aligned <td> cells hold the taxonomy columns; stop at the
            # first cell containing "("
            soupsup = soup.findAll('td', align="left")
            for node in soupsup:
                waant = ''.join(node.findAll(text=True))
                waant = str(waant)
                waant = waant.replace('\n', '')
                f.write(waant + ',')
                if "(" in node:
                    break
            items = []
            for t in soup.findAll('td'):
                items.append(t.text)
            check = 9
            # the last <td> is assumed to hold the size field -- TODO confirm
            badge = len(items)
            if badge > 6:
                f.write(items[badge - 1] + ',')
            else:
                f.write(',')
            # threats paragraph: keyword scan + cleaned text column
            badges = soup.findAll("p", class_="Threats")
            ofInterest = str(badges)
            foundThreats = findThreats(ofInterest)
            ofInterest = parseThrough(ofInterest)
            threatString = threatString + ofInterest
            if ofInterest:
                f.write(ofInterest)
                f.write(',')
            else:
                f.write(' ,')
            # conservation paragraph: keyword scan + cleaned text column
            badges = soup.findAll("p", class_="Conservation")
            ofInterest = str(badges)
            foundCons = findConservation(ofInterest)
            ofInterest = parseThrough(ofInterest)
            conservationString = conservationString + ofInterest
            # range paragraph: count country mentions via GeoText
            badges = soup.findAll("p", class_="Range")
            badges = str(badges)
            countries = gt(badges).country_mentions
            countries = str(countries)
            #countries = re.sub('[^A-Z]', '', s)
            countries = countries.replace(',', '')
            # digit count of the stringified Counter approximates the number of
            # distinct country mentions -- TODO confirm this heuristic
            cCount = sum(c.isdigit() for c in countries)
            cCount = str(cCount)
            print cCount
            # conservation status: collapse to two categories
            status = soup.findAll("p", class_="Status")
            status = str(status)
            if 'Critically' in status:
                status = 'Critically Endangered'
            else:
                status = 'Endangered'
            # note: writes the conservation text again (ofInterest still holds
            # the Conservation paragraph at this point)
            if ofInterest:
                f.write(ofInterest)
                f.write(' ,' + '')
            else:
                f.write(' ,')
            for node in foundThreats:
                f.write(node)
                f.write(';')
            f.write(' ,')
            for node in foundCons:
                f.write(node)
                f.write(';')
            f.write(' ,')
            f.write(status)
            f.write(',')
            f.write(countries)
            f.write(',')
            f.write(cCount)
            f.write('\n')
    # fd is already closed by the with-block; this extra close is a no-op
    fd.close()
    f.close()
def main():
    # Entry point: scrape every URL listed in test.txt and write output.txt.
    urlNeeded()
main()
<?php
/* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4: */
/**
* The PEAR DB driver for PHP's oci8 extension
* for interacting with Oracle databases
*
* PHP versions 4 and 5
*
* LICENSE: This source file is subject to version 3.0 of the PHP license
* that is available through the world-wide-web at the following URI:
* http://www.php.net/license/3_0.txt. If you did not receive a copy of
* the PHP License and are unable to obtain it through the web, please
* send a note to [email protected] so we can mail you a copy immediately.
*
* @category Database
* @package DB
* @author James L. Pine <[email protected]>
* @author Daniel Convissor <[email protected]>
* @copyright 1997-2005 The PHP Group
* @license http://www.php.net/license/3_0.txt PHP License 3.0
* @version CVS: $Id: oci8.php,v 1.103 2005/04/11 15:10:22 danielc Exp $
* @link http://pear.php.net/package/DB
*/
/**
* Obtain the DB_common class so it can be extended from
*/
require_once 'DB/common.php';
/**
* The methods PEAR DB uses to interact with PHP's oci8 extension
* for interacting with Oracle databases
*
* Definitely works with versions 8 and 9 of Oracle.
*
* These methods overload the ones declared in DB_common.
*
* Be aware... OCIError() only appears to return anything when given a
* statement, so functions return the generic DB_ERROR instead of more
* useful errors that have to do with feedback from the database.
*
* @category Database
* @package DB
* @author James L. Pine <[email protected]>
* @author Daniel Convissor <[email protected]>
* @copyright 1997-2005 The PHP Group
* @license http://www.php.net/license/3_0.txt PHP License 3.0
* @version Release: @package_version@
* @link http://pear.php.net/package/DB
*/
class DB_oci8 extends DB_common
{
    // {{{ properties
    /**
     * The DB driver type (mysql, oci8, odbc, etc.)
     * @var string
     */
    var $phptype = 'oci8';
    /**
     * The database syntax variant to be used (db2, access, etc.), if any
     * @var string
     */
    var $dbsyntax = 'oci8';
    /**
     * The capabilities of this DB implementation
     *
     * The 'new_link' element contains the PHP version that first provided
     * new_link support for this DBMS.  Contains false if it's unsupported.
     *
     * Meaning of the 'limit' element:
     *   + 'emulate' = emulate with fetch row by number
     *   + 'alter'   = alter the query
     *   + false     = skip rows
     *
     * @var array
     */
    var $features = array(
        'limit'         => 'alter',
        'new_link'      => '5.0.0',
        'numrows'       => 'subquery',
        'pconnect'      => true,
        'prepare'       => true,
        'ssl'           => false,
        'transactions'  => true,
    );
    /**
     * A mapping of native error codes to DB error codes
     *
     * Keys are Oracle ORA-* error numbers (e.g. 942 = ORA-00942
     * "table or view does not exist").
     *
     * @var array
     */
    var $errorcode_map = array(
        1    => DB_ERROR_CONSTRAINT,
        900  => DB_ERROR_SYNTAX,
        904  => DB_ERROR_NOSUCHFIELD,
        913  => DB_ERROR_VALUE_COUNT_ON_ROW,
        921  => DB_ERROR_SYNTAX,
        923  => DB_ERROR_SYNTAX,
        942  => DB_ERROR_NOSUCHTABLE,
        955  => DB_ERROR_ALREADY_EXISTS,
        1400 => DB_ERROR_CONSTRAINT_NOT_NULL,
        1401 => DB_ERROR_INVALID,
        1407 => DB_ERROR_CONSTRAINT_NOT_NULL,
        1418 => DB_ERROR_NOT_FOUND,
        1476 => DB_ERROR_DIVZERO,
        1722 => DB_ERROR_INVALID_NUMBER,
        2289 => DB_ERROR_NOSUCHTABLE,
        2291 => DB_ERROR_CONSTRAINT,
        2292 => DB_ERROR_CONSTRAINT,
        2449 => DB_ERROR_CONSTRAINT,
    );
    /**
     * The raw database connection created by PHP
     * @var resource
     */
    var $connection;
    /**
     * The DSN information for connecting to a database
     * @var array
     */
    var $dsn = array();
    /**
     * Should data manipulation queries be committed automatically?
     *
     * Toggled via autoCommit(); consulted by simpleQuery() and execute()
     * when choosing the OCIExecute() mode.
     *
     * @var bool
     * @access private
     */
    var $autocommit = true;
    /**
     * Stores the $data passed to execute() in the oci8 driver
     *
     * Gets reset to array() when simpleQuery() is run.
     *
     * Needed in case user wants to call numRows() after prepare/execute
     * was used.
     *
     * @var array
     * @access private
     */
    var $_data = array();
    /**
     * The result or statement handle from the most recently executed query
     *
     * Read by affectedRows(), numRows(), errorNative() and tableInfo().
     *
     * @var resource
     */
    var $last_stmt;
    /**
     * Is the given prepared statement a data manipulation query?
     *
     * Keyed by the integer id of the statement resource; set in prepare().
     *
     * @var array
     * @access private
     */
    var $manip_query = array();
    // }}}
    // {{{ constructor
    /**
     * This constructor calls <kbd>$this->DB_common()</kbd>
     *
     * (PHP 4 style constructor: the method name matches the class name.)
     *
     * @return void
     */
    function DB_oci8()
    {
        $this->DB_common();
    }
    // }}}
    // {{{ connect()
    /**
     * Connect to the database server, log in and open the database
     *
     * Don't call this method directly.  Use DB::connect() instead.
     *
     * If PHP is at version 5.0.0 or greater:
     *   + Generally, oci_connect() or oci_pconnect() are used.
     *   + But if the new_link DSN option is set to true, oci_new_connect()
     *     is used.
     *
     * When using PHP version 4.x, OCILogon() or OCIPLogon() are used.
     *
     * PEAR DB's oci8 driver supports the following extra DSN options:
     *   + charset   The character set to be used on the connection.
     *               Only used if PHP is at version 5.0.0 or greater
     *               and the Oracle server is at 9.2 or greater.
     *               Available since PEAR DB 1.7.0.
     *   + new_link  If set to true, causes subsequent calls to
     *               connect() to return a new connection link
     *               instead of the existing one.  WARNING: this is
     *               not portable to other DBMS's.
     *               Available since PEAR DB 1.7.0.
     *
     * @param array $dsn         the data source name
     * @param bool  $persistent  should the connection be persistent?
     *
     * @return int  DB_OK on success.  A DB_Error object on failure.
     */
    function connect($dsn, $persistent = false)
    {
        if (!PEAR::loadExtension('oci8')) {
            return $this->raiseError(DB_ERROR_EXTENSION_NOT_FOUND);
        }
        $this->dsn = $dsn;
        // NOTE(review): reads $dsn['dbsyntax'] without isset(); emits a
        // notice when the key is absent -- confirm the DSN parser always
        // supplies it.
        if ($dsn['dbsyntax']) {
            $this->dbsyntax = $dsn['dbsyntax'];
        }
        if (function_exists('oci_connect')) {
            // PHP 5 path: pick the oci_*connect() flavor requested.
            if (isset($dsn['new_link'])
                && ($dsn['new_link'] == 'true' || $dsn['new_link'] === true))
            {
                $connect_function = 'oci_new_connect';
            } else {
                $connect_function = $persistent ? 'oci_pconnect'
                                    : 'oci_connect';
            }
            // Backwards compatibility with DB < 1.7.0
            if (empty($dsn['database']) && !empty($dsn['hostspec'])) {
                $db = $dsn['hostspec'];
            } else {
                $db = $dsn['database'];
            }
            $char = empty($dsn['charset']) ? null : $dsn['charset'];
            $this->connection = @$connect_function($dsn['username'],
                                                   $dsn['password'],
                                                   $db,
                                                   $char);
            $error = OCIError();
            if (!empty($error) && $error['code'] == 12541) {
                // Couldn't find TNS listener.  Try direct connection.
                $this->connection = @$connect_function($dsn['username'],
                                                       $dsn['password'],
                                                       null,
                                                       $char);
            }
        } else {
            // PHP 4 path: OCILogon()/OCIPLogon() take at most a hostspec
            // and have no charset parameter.
            $connect_function = $persistent ? 'OCIPLogon' : 'OCILogon';
            if ($dsn['hostspec']) {
                $this->connection = @$connect_function($dsn['username'],
                                                       $dsn['password'],
                                                       $dsn['hostspec']);
            } elseif ($dsn['username'] || $dsn['password']) {
                $this->connection = @$connect_function($dsn['username'],
                                                       $dsn['password']);
            }
        }
        if (!$this->connection) {
            $error = OCIError();
            $error = (is_array($error)) ? $error['message'] : null;
            return $this->raiseError(DB_ERROR_CONNECT_FAILED,
                                     null, null, null,
                                     $error);
        }
        return DB_OK;
    }
    // }}}
// {{{ disconnect()
/**
* Disconnects from the database server
*
* @return bool TRUE on success, FALSE on failure
*/
function disconnect()
{
if (function_exists('oci_close')) {
$ret = @oci_close($this->connection);
} else {
$ret = @OCILogOff($this->connection);
}
$this->connection = null;
return $ret;
}
// }}}
    // {{{ simpleQuery()
    /**
     * Sends a query to the database server
     *
     * To determine how many rows of a result set get buffered using
     * ocisetprefetch(), see the "result_buffering" option in setOptions().
     * This option was added in Release 1.7.0.
     *
     * @param string  the SQL query string
     *
     * @return mixed  + a PHP result resource for successful SELECT queries
     *                + the DB_OK constant for other successful queries
     *                + a DB_Error object on failure
     */
    function simpleQuery($query)
    {
        // Reset bind data saved by execute(); numRows() relies on $_data
        // to know whether the last query used bound parameters.
        $this->_data = array();
        $this->last_parameters = array();
        $this->last_query = $query;
        $query = $this->modifyQuery($query);
        $result = @OCIParse($this->connection, $query);
        if (!$result) {
            return $this->oci8RaiseError();
        }
        if ($this->autocommit) {
            $success = @OCIExecute($result,OCI_COMMIT_ON_SUCCESS);
        } else {
            $success = @OCIExecute($result,OCI_DEFAULT);
        }
        if (!$success) {
            return $this->oci8RaiseError($result);
        }
        $this->last_stmt = $result;
        if (DB::isManip($query)) {
            return DB_OK;
        } else {
            // SELECT: the statement handle doubles as the result resource.
            // Set the row prefetch count from the result_buffering option.
            @ocisetprefetch($result, $this->options['result_buffering']);
            return $result;
        }
    }
    // }}}
    // {{{ nextResult()
    /**
     * Move the internal oracle result pointer to the next available result
     *
     * This driver has no support for multiple result sets, so there is
     * never a "next" result and false is returned unconditionally.
     *
     * @param resource $result  a valid oci8 result resource
     *
     * @access public
     *
     * @return true if a result is available otherwise return false
     */
    function nextResult($result)
    {
        return false;
    }
    // }}}
// {{{ fetchInto()
/**
* Places a row from the result set into the given array
*
* Formating of the array and the data therein are configurable.
* See DB_result::fetchInto() for more information.
*
* This method is not meant to be called directly. Use
* DB_result::fetchInto() instead. It can't be declared "protected"
* because DB_result is a separate object.
*
* @param resource $result the query result resource
* @param array $arr the referenced array to put the data in
* @param int $fetchmode how the resulting array should be indexed
* @param int $rownum the row number to fetch (0 = first row)
*
* @return mixed DB_OK on success, NULL when the end of a result set is
* reached or on failure
*
* @see DB_result::fetchInto()
*/
function fetchInto($result, &$arr, $fetchmode, $rownum = null)
{
if ($rownum !== null) {
return $this->raiseError(DB_ERROR_NOT_CAPABLE);
}
if ($fetchmode & DB_FETCHMODE_ASSOC) {
$moredata = @OCIFetchInto($result,$arr,OCI_ASSOC+OCI_RETURN_NULLS+OCI_RETURN_LOBS);
if ($this->options['portability'] & DB_PORTABILITY_LOWERCASE &&
$moredata)
{
$arr = array_change_key_case($arr, CASE_LOWER);
}
} else {
$moredata = OCIFetchInto($result,$arr,OCI_RETURN_NULLS+OCI_RETURN_LOBS);
}
if (!$moredata) {
return null;
}
if ($this->options['portability'] & DB_PORTABILITY_RTRIM) {
$this->_rtrimArrayValues($arr);
}
if ($this->options['portability'] & DB_PORTABILITY_NULL_TO_EMPTY) {
$this->_convertNullArrayValuesToEmpty($arr);
}
return DB_OK;
}
// }}}
    // {{{ freeResult()
    /**
     * Deletes the result set and frees the memory occupied by the result set
     *
     * This method is not meant to be called directly.  Use
     * DB_result::free() instead.  It can't be declared "protected"
     * because DB_result is a separate object.
     *
     * With oci8 the result resource IS the statement handle, so freeing
     * the statement frees the result.
     *
     * @param resource $result  PHP's query result resource
     *
     * @return bool  TRUE on success, FALSE if $result is invalid
     *
     * @see DB_result::free()
     */
    function freeResult($result)
    {
        return @OCIFreeStatement($result);
    }
    /**
     * Frees the internal resources associated with a prepared query
     *
     * @param resource $stmt           the prepared statement's resource
     * @param bool     $free_resource  should the PHP resource be freed too?
     *                                  Use false if you need to get data
     *                                  from the result set later.
     *
     * @return bool  TRUE on success, FALSE if $result is invalid
     *
     * @see DB_oci8::prepare()
     */
    function freePrepared($stmt, $free_resource = true)
    {
        if (!is_resource($stmt)) {
            return false;
        }
        if ($free_resource) {
            @ocifreestatement($stmt);
        }
        // Only statements registered by prepare() are tracked here; an
        // unknown statement reports failure even though the resource may
        // already have been freed above.
        if (isset($this->prepare_types[(int)$stmt])) {
            unset($this->prepare_types[(int)$stmt]);
            unset($this->manip_query[(int)$stmt]);
        } else {
            return false;
        }
        return true;
    }
    // }}}
    // {{{ numRows()
    /**
     * Gets the number of rows in a result set
     *
     * Only works if the DB_PORTABILITY_NUMROWS portability option
     * is turned on.
     *
     * Emulated by wrapping the most recent query in
     * SELECT COUNT(*) FROM (...) and running it again.
     *
     * This method is not meant to be called directly.  Use
     * DB_result::numRows() instead.  It can't be declared "protected"
     * because DB_result is a separate object.
     *
     * @param resource $result  PHP's query result resource
     *
     * @return int  the number of rows.  A DB_Error object on failure.
     *
     * @see DB_result::numRows(), DB_common::setOption()
     */
    function numRows($result)
    {
        // emulate numRows for Oracle.  yuck.
        if ($this->options['portability'] & DB_PORTABILITY_NUMROWS &&
            $result === $this->last_stmt)
        {
            $countquery = 'SELECT COUNT(*) FROM ('.$this->last_query.')';
            // Re-running a query clobbers last_query/last_stmt, so save
            // them now and restore them if the count fails.
            $save_query = $this->last_query;
            $save_stmt = $this->last_stmt;
            if (count($this->_data)) {
                // The original query came through prepare()/execute(),
                // so the COUNT(*) wrapper needs the same bind data.
                $smt = $this->prepare('SELECT COUNT(*) FROM ('.$this->last_query.')');
                $count = $this->execute($smt, $this->_data);
            } else {
                $count =& $this->query($countquery);
            }
            if (DB::isError($count) ||
                DB::isError($row = $count->fetchRow(DB_FETCHMODE_ORDERED)))
            {
                $this->last_query = $save_query;
                $this->last_stmt = $save_stmt;
                return $this->raiseError(DB_ERROR_NOT_CAPABLE);
            }
            return $row[0];
        }
        return $this->raiseError(DB_ERROR_NOT_CAPABLE);
    }
    // }}}
// {{{ numCols()
/**
* Gets the number of columns in a result set
*
* This method is not meant to be called directly. Use
* DB_result::numCols() instead. It can't be declared "protected"
* because DB_result is a separate object.
*
* @param resource $result PHP's query result resource
*
* @return int the number of columns. A DB_Error object on failure.
*
* @see DB_result::numCols()
*/
function numCols($result)
{
$cols = @OCINumCols($result);
if (!$cols) {
return $this->oci8RaiseError($result);
}
return $cols;
}
// }}}
    // {{{ prepare()
    /**
     * Prepares a query for multiple execution with execute().
     *
     * With oci8, this is emulated.
     *
     * prepare() requires a generic query as string like <code>
     *    INSERT INTO numbers VALUES (?, ?, ?)
     * </code>.  The <kbd>?</kbd> characters are placeholders.
     *
     * Three types of placeholders can be used:
     *   + <kbd>?</kbd>  a quoted scalar value, i.e. strings, integers
     *   + <kbd>!</kbd>  value is inserted 'as is'
     *   + <kbd>&</kbd>  requires a file name.  The file's contents get
     *                     inserted into the query (i.e. saving binary
     *                     data in a db)
     *
     * Use backslashes to escape placeholder characters if you don't want
     * them to be interpreted as placeholders.  Example: <code>
     *    "UPDATE foo SET col=? WHERE col='over \& under'"
     * </code>
     *
     * @param string $query  the query to be prepared
     *
     * @return mixed  DB statement resource on success. DB_Error on failure.
     *
     * @see DB_oci8::execute()
     */
    function prepare($query)
    {
        // Split on unescaped placeholder characters, keeping the
        // delimiters in the token list via PREG_SPLIT_DELIM_CAPTURE.
        $tokens = preg_split('/((?<!\\\)[&?!])/', $query, -1,
                             PREG_SPLIT_DELIM_CAPTURE);
        $binds = count($tokens) - 1;
        $token = 0;
        $types = array();
        $newquery = '';
        foreach ($tokens as $key => $val) {
            switch ($val) {
            case '?':
                $types[$token++] = DB_PARAM_SCALAR;
                unset($tokens[$key]);
                break;
            case '&':
                $types[$token++] = DB_PARAM_OPAQUE;
                unset($tokens[$key]);
                break;
            case '!':
                $types[$token++] = DB_PARAM_MISC;
                unset($tokens[$key]);
                break;
            default:
                // Literal SQL chunk: un-escape backslashed placeholder
                // characters, then append an Oracle bind name
                // (:bind0, :bind1, ...) unless this is the final chunk.
                $tokens[$key] = preg_replace('/\\\([&?!])/', "\\1", $val);
                if ($key != $binds) {
                    $newquery .= $tokens[$key] . ':bind' . $token;
                } else {
                    $newquery .= $tokens[$key];
                }
            }
        }
        $this->last_query = $query;
        $newquery = $this->modifyQuery($newquery);
        if (!$stmt = @OCIParse($this->connection, $newquery)) {
            return $this->oci8RaiseError();
        }
        // Remember each placeholder's type and whether the statement is a
        // data manipulation query, keyed by the statement resource's id.
        $this->prepare_types[(int)$stmt] = $types;
        $this->manip_query[(int)$stmt] = DB::isManip($query);
        return $stmt;
    }
    // }}}
    // {{{ execute()
    /**
     * Executes a DB statement prepared with prepare().
     *
     * To determine how many rows of a result set get buffered using
     * ocisetprefetch(), see the "result_buffering" option in setOptions().
     * This option was added in Release 1.7.0.
     *
     * @param resource $stmt  a DB statement resource returned from prepare()
     * @param mixed    $data  array, string or numeric data to be used in
     *                         execution of the statement.  Quantity of items
     *                         passed must match quantity of placeholders in
     *                         query:  meaning 1 for non-array items or the
     *                         quantity of elements in the array.
     *
     * @return mixed  returns an oci8 result resource for successful SELECT
     *                queries, DB_OK for other successful queries.
     *                A DB error object is returned on failure.
     *
     * @see DB_oci8::prepare()
     */
    function &execute($stmt, $data = array())
    {
        $data = (array)$data;
        $this->last_parameters = $data;
        // Keep a copy of the bind data so numRows() can re-run the query.
        $this->_data = $data;
        $types =& $this->prepare_types[(int)$stmt];
        if (count($types) != count($data)) {
            $tmp =& $this->raiseError(DB_ERROR_MISMATCH);
            return $tmp;
        }
        $i = 0;
        foreach ($data as $key => $value) {
            if ($types[$i] == DB_PARAM_MISC) {
                /*
                 * Oracle doesn't seem to have the ability to pass a
                 * parameter along unchanged, so strip off quotes from start
                 * and end, plus turn two single quotes to one single quote,
                 * in order to avoid the quotes getting escaped by
                 * Oracle and ending up in the database.
                 */
                $data[$key] = preg_replace("/^'(.*)'$/", "\\1", $data[$key]);
                $data[$key] = str_replace("''", "'", $data[$key]);
            } elseif ($types[$i] == DB_PARAM_OPAQUE) {
                // Opaque parameter: the value is a file name whose
                // contents become the bound value.
                $fp = @fopen($data[$key], 'rb');
                if (!$fp) {
                    $tmp =& $this->raiseError(DB_ERROR_ACCESS_VIOLATION);
                    return $tmp;
                }
                $data[$key] = fread($fp, filesize($data[$key]));
                fclose($fp);
            }
            // Bind names must match the :bindN placeholders generated
            // by prepare().
            if (!@OCIBindByName($stmt, ':bind' . $i, $data[$key], -1)) {
                $tmp = $this->oci8RaiseError($stmt);
                return $tmp;
            }
            $i++;
        }
        if ($this->autocommit) {
            $success = @OCIExecute($stmt, OCI_COMMIT_ON_SUCCESS);
        } else {
            $success = @OCIExecute($stmt, OCI_DEFAULT);
        }
        if (!$success) {
            $tmp = $this->oci8RaiseError($stmt);
            return $tmp;
        }
        $this->last_stmt = $stmt;
        if ($this->manip_query[(int)$stmt]) {
            $tmp = DB_OK;
        } else {
            @ocisetprefetch($stmt, $this->options['result_buffering']);
            $tmp =& new DB_result($this, $stmt);
        }
        return $tmp;
    }
    // }}}
// {{{ autoCommit()
/**
* Enables or disables automatic commits
*
* @param bool $onoff true turns it on, false turns it off
*
* @return int DB_OK on success. A DB_Error object if the driver
* doesn't support auto-committing transactions.
*/
function autoCommit($onoff = false)
{
$this->autocommit = (bool)$onoff;;
return DB_OK;
}
// }}}
// {{{ commit()
/**
* Commits the current transaction
*
* @return int DB_OK on success. A DB_Error object on failure.
*/
function commit()
{
$result = @OCICommit($this->connection);
if (!$result) {
return $this->oci8RaiseError();
}
return DB_OK;
}
// }}}
// {{{ rollback()
/**
* Reverts the current transaction
*
* @return int DB_OK on success. A DB_Error object on failure.
*/
function rollback()
{
$result = @OCIRollback($this->connection);
if (!$result) {
return $this->oci8RaiseError();
}
return DB_OK;
}
// }}}
// {{{ affectedRows()
/**
* Determines the number of rows affected by a data maniuplation query
*
* 0 is returned for queries that don't manipulate data.
*
* @return int the number of rows. A DB_Error object on failure.
*/
function affectedRows()
{
if ($this->last_stmt === false) {
return $this->oci8RaiseError();
}
$result = @OCIRowCount($this->last_stmt);
if ($result === false) {
return $this->oci8RaiseError($this->last_stmt);
}
return $result;
}
// }}}
// {{{ modifyQuery()
/**
* Changes a query string for various DBMS specific reasons
*
* "SELECT 2+2" must be "SELECT 2+2 FROM dual" in Oracle.
*
* @param string $query the query string to modify
*
* @return string the modified query string
*
* @access protected
*/
function modifyQuery($query)
{
if (preg_match('/^\s*SELECT/i', $query) &&
!preg_match('/\sFROM\s/i', $query)) {
$query .= ' FROM dual';
}
return $query;
}
// }}}
    // {{{ modifyLimitQuery()
    /**
     * Adds LIMIT clauses to a query string according to current DBMS standards
     *
     * Oracle has no LIMIT clause, so the query is wrapped in nested
     * subselects that filter on ROWNUM.
     *
     * @param string $query   the query to modify
     * @param int    $from    the row to start to fetching (0 = the first row)
     * @param int    $count   the numbers of rows to fetch
     * @param mixed  $params  array, string or numeric data to be used in
     *                         execution of the statement.  Quantity of items
     *                         passed must match quantity of placeholders in
     *                         query:  meaning 1 placeholder for non-array
     *                         parameters or 1 placeholder per array element.
     *
     * @return string  the query string with LIMIT clauses added
     *
     * @access protected
     */
    function modifyLimitQuery($query, $from, $count, $params = array())
    {
        // Let Oracle return the name of the columns instead of
        // coding a "home" SQL parser
        // (the WHERE NULL = NULL wrapper matches no rows, so only the
        // column metadata is produced).
        if (count($params)) {
            $result = $this->prepare("SELECT * FROM ($query) "
                                     . 'WHERE NULL = NULL');
            $tmp =& $this->execute($result, $params);
        } else {
            $q_fields = "SELECT * FROM ($query) WHERE NULL = NULL";
            if (!$result = @OCIParse($this->connection, $q_fields)) {
                $this->last_query = $q_fields;
                return $this->oci8RaiseError();
            }
            if (!@OCIExecute($result, OCI_DEFAULT)) {
                $this->last_query = $q_fields;
                return $this->oci8RaiseError($result);
            }
        }
        // Collect the (quoted) column names for the outer SELECT list.
        $ncols = OCINumCols($result);
        $cols  = array();
        for ( $i = 1; $i <= $ncols; $i++ ) {
            $cols[] = '"' . OCIColumnName($result, $i) . '"';
        }
        $fields = implode(', ', $cols);
        // XXX Test that (tip by John Lim)
        //if (preg_match('/^\s*SELECT\s+/is', $query, $match)) {
        //    // Introduce the FIRST_ROWS Oracle query optimizer
        //    $query = substr($query, strlen($match[0]), strlen($query));
        //    $query = "SELECT /* +FIRST_ROWS */ " . $query;
        //}
        // Construct the query
        // more at: http://marc.theaimsgroup.com/?l=php-db&m=99831958101212&w=2
        // Perhaps this could be optimized with the use of Unions
        // Inner query numbers the rows (linenum) and caps them at
        // $from + $count; the outer query drops rows before $from + 1.
        $query = "SELECT $fields FROM".
                 "  (SELECT rownum as linenum, $fields FROM".
                 "      ($query)".
                 '  WHERE rownum <= '. ($from + $count) .
                 ') WHERE linenum >= ' . ++$from;
        return $query;
    }
    // }}}
// {{{ nextId()
/**
* Returns the next free id in a sequence
*
* @param string $seq_name name of the sequence
* @param boolean $ondemand when true, the seqence is automatically
* created if it does not exist
*
* @return int the next id number in the sequence.
* A DB_Error object on failure.
*
* @see DB_common::nextID(), DB_common::getSequenceName(),
* DB_oci8::createSequence(), DB_oci8::dropSequence()
*/
function nextId($seq_name, $ondemand = true)
{
$seqname = $this->getSequenceName($seq_name);
$repeat = 0;
do {
$this->expectError(DB_ERROR_NOSUCHTABLE);
$result =& $this->query("SELECT ${seqname}.nextval FROM dual");
$this->popExpect();
if ($ondemand && DB::isError($result) &&
$result->getCode() == DB_ERROR_NOSUCHTABLE) {
$repeat = 1;
$result = $this->createSequence($seq_name);
if (DB::isError($result)) {
return $this->raiseError($result);
}
} else {
$repeat = 0;
}
} while ($repeat);
if (DB::isError($result)) {
return $this->raiseError($result);
}
$arr = $result->fetchRow(DB_FETCHMODE_ORDERED);
return $arr[0];
}
/**
* Creates a new sequence
*
* @param string $seq_name name of the new sequence
*
* @return int DB_OK on success. A DB_Error object on failure.
*
* @see DB_common::createSequence(), DB_common::getSequenceName(),
* DB_oci8::nextID(), DB_oci8::dropSequence()
*/
function createSequence($seq_name)
{
return $this->query('CREATE SEQUENCE '
. $this->getSequenceName($seq_name));
}
// }}}
// {{{ dropSequence()
/**
* Deletes a sequence
*
* @param string $seq_name name of the sequence to be deleted
*
* @return int DB_OK on success. A DB_Error object on failure.
*
* @see DB_common::dropSequence(), DB_common::getSequenceName(),
* DB_oci8::nextID(), DB_oci8::createSequence()
*/
function dropSequence($seq_name)
{
return $this->query('DROP SEQUENCE '
. $this->getSequenceName($seq_name));
}
// }}}
// {{{ oci8RaiseError()
/**
* Produces a DB_Error object regarding the current problem
*
* @param int $errno if the error is being manually raised pass a
* DB_ERROR* constant here. If this isn't passed
* the error information gathered from the DBMS.
*
* @return object the DB_Error object
*
* @see DB_common::raiseError(),
* DB_oci8::errorNative(), DB_oci8::errorCode()
*/
function oci8RaiseError($errno = null)
{
if ($errno === null) {
$error = @OCIError($this->connection);
return $this->raiseError($this->errorCode($error['code']),
null, null, null, $error['message']);
} elseif (is_resource($errno)) {
$error = @OCIError($errno);
return $this->raiseError($this->errorCode($error['code']),
null, null, null, $error['message']);
}
return $this->raiseError($this->errorCode($errno));
}
// }}}
// {{{ errorNative()
/**
* Gets the DBMS' native error code produced by the last query
*
* @return int the DBMS' error code. FALSE if the code could not be
* determined
*/
function errorNative()
{
if (is_resource($this->last_stmt)) {
$error = @OCIError($this->last_stmt);
} else {
$error = @OCIError($this->connection);
}
if (is_array($error)) {
return $error['code'];
}
return false;
}
// }}}
    // {{{ tableInfo()
    /**
     * Returns information about a table or a result set
     *
     * NOTE: only supports 'table' and 'flags' if <var>$result</var>
     * is a table name.
     *
     * NOTE: flags won't contain index information.
     *
     * @param object|string  $result  DB_result object from a query or a
     *                                 string containing the name of a table.
     *                                 While this also accepts a query result
     *                                 resource identifier, this behavior is
     *                                 deprecated.
     * @param int            $mode    a valid tableInfo mode
     *
     * @return array  an associative array with the information requested.
     *                 A DB_Error object on failure.
     *
     * @see DB_common::tableInfo()
     */
    function tableInfo($result, $mode = null)
    {
        if ($this->options['portability'] & DB_PORTABILITY_LOWERCASE) {
            $case_func = 'strtolower';
        } else {
            $case_func = 'strval';
        }
        $res = array();
        if (is_string($result)) {
            /*
             * Probably received a table name.
             * Create a result resource identifier.
             */
            // Oracle stores unquoted identifiers in upper case.
            $result = strtoupper($result);
            $q_fields = 'SELECT column_name, data_type, data_length, '
                        . 'nullable '
                        . 'FROM user_tab_columns '
                        . "WHERE table_name='$result' ORDER BY column_id";
            $this->last_query = $q_fields;
            // NOTE(review): DB_ERROR_NEED_MORE_DATA is a DB_ERROR_*
            // constant, but oci8RaiseError() expects a native code or a
            // resource here -- confirm this is intentional.
            if (!$stmt = @OCIParse($this->connection, $q_fields)) {
                return $this->oci8RaiseError(DB_ERROR_NEED_MORE_DATA);
            }
            if (!@OCIExecute($stmt, OCI_DEFAULT)) {
                return $this->oci8RaiseError($stmt);
            }
            $i = 0;
            while (@OCIFetch($stmt)) {
                $res[$i] = array(
                    'table' => $case_func($result),
                    'name'  => $case_func(@OCIResult($stmt, 1)),
                    'type'  => @OCIResult($stmt, 2),
                    'len'   => @OCIResult($stmt, 3),
                    'flags' => (@OCIResult($stmt, 4) == 'N') ? 'not_null' : '',
                );
                if ($mode & DB_TABLEINFO_ORDER) {
                    $res['order'][$res[$i]['name']] = $i;
                }
                if ($mode & DB_TABLEINFO_ORDERTABLE) {
                    $res['ordertable'][$res[$i]['table']][$res[$i]['name']] = $i;
                }
                $i++;
            }
            if ($mode) {
                $res['num_fields'] = $i;
            }
            @OCIFreeStatement($stmt);
        } else {
            if (isset($result->result)) {
                /*
                 * Probably received a result object.
                 * Extract the result resource identifier.
                 */
                $result = $result->result;
            }
            $res = array();
            // Column metadata is only available for the most recently
            // executed statement.
            if ($result === $this->last_stmt) {
                $count = @OCINumCols($result);
                if ($mode) {
                    $res['num_fields'] = $count;
                }
                for ($i = 0; $i < $count; $i++) {
                    $res[$i] = array(
                        'table' => '',
                        'name'  => $case_func(@OCIColumnName($result, $i+1)),
                        'type'  => @OCIColumnType($result, $i+1),
                        'len'   => @OCIColumnSize($result, $i+1),
                        'flags' => '',
                    );
                    if ($mode & DB_TABLEINFO_ORDER) {
                        $res['order'][$res[$i]['name']] = $i;
                    }
                    if ($mode & DB_TABLEINFO_ORDERTABLE) {
                        $res['ordertable'][$res[$i]['table']][$res[$i]['name']] = $i;
                    }
                }
            } else {
                return $this->raiseError(DB_ERROR_NOT_CAPABLE);
            }
        }
        return $res;
    }
    // }}}
// {{{ getSpecialQuery()
/**
* Obtains the query string needed for listing a given type of objects
*
* @param string $type the kind of objects you want to retrieve
*
* @return string the SQL query string or null if the driver doesn't
* support the object type requested
*
* @access protected
* @see DB_common::getListOf()
*/
function getSpecialQuery($type)
{
switch ($type) {
case 'tables':
return 'SELECT table_name FROM user_tables';
case 'synonyms':
return 'SELECT synonym_name FROM user_synonyms';
default:
return null;
}
}
// }}}
}
/*
* Local variables:
* tab-width: 4
* c-basic-offset: 4
* End:
*/
?>
| {'content_hash': 'dc42cd00866890e91f9235938a9acf87', 'timestamp': '', 'source': 'github', 'line_count': 1117, 'max_line_length': 95, 'avg_line_length': 32.43151298119964, 'alnum_prop': 0.49216032683707833, 'repo_name': 'Southbay-CityChurch/crossings-community', 'id': 'ee8c956e98a4bb557e19d2a426b84fe4f1c59a80', 'size': '36226', 'binary': False, 'copies': '7', 'ref': 'refs/heads/master', 'path': 'sermons/__lib/vendor/DB/oci8.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '51174'}, {'name': 'PHP', 'bytes': '232532'}]} |
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Dedox.Tests
{
    [TestClass]
    public class FieldCommentsTest : CommentsTest
    {
        /// <summary>
        /// A field XML-doc summary that merely restates the field's name
        /// ("The lol wut." for LolWut) adds no information, so the whole
        /// comment block should be removed.
        /// </summary>
        [TestMethod]
        public void StripsBasicPattern()
        {
            const string text = @"
public class Cat
{
    /// <summary>
    /// The lol wut.
    /// </summary>
    public string LolWut;
}
";
            const string expected = @"
public class Cat
{
    public string LolWut;
}
";
            // VerifyStrip is inherited from CommentsTest; presumably it
            // runs the comment stripper over the input and asserts the
            // output matches -- see the base class.
            VerifyStrip(text, expected);
        }
    }
} | {'content_hash': 'd2df4258930518f274a7d537bbc90756', 'timestamp': '', 'source': 'github', 'line_count': 29, 'max_line_length': 52, 'avg_line_length': 17.620689655172413, 'alnum_prop': 0.5792563600782779, 'repo_name': 'einarwh/Dedox', 'id': 'add2f4a7f5b5a3b9b1040689194c86d41fb3a554', 'size': '513', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Dedox.Tests/FieldCommentsTest.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C#', 'bytes': '89995'}]} |
package app.android.box.waveprotocol.org.androidwave.service.documents;
import com.google.common.base.Preconditions;
import org.waveprotocol.wave.model.conversation.ConversationBlip;
import org.waveprotocol.wave.model.document.operation.DocInitialization;
import org.waveprotocol.wave.model.id.IdUtil;
import org.waveprotocol.wave.model.id.ModernIdSerialiser;
import org.waveprotocol.wave.model.id.WaveletId;
import org.waveprotocol.wave.model.util.CollectionUtils;
import org.waveprotocol.wave.model.util.StringMap;
import org.waveprotocol.wave.model.wave.data.DocumentFactory;
import org.waveprotocol.wave.model.wave.data.DocumentOperationSink;
/**
 * Creates documents for a wave and indexes the conversational (blip) ones.
 *
 * Documents whose id satisfies {@link IdUtil#isBlipId} are built by the
 * blip-document factory and cached per wavelet for later lookup; all other
 * (data) documents are built by the data-document factory and not retained.
 */
public final class WaveDocuments<BlipDocument extends DocumentOperationSink>
        implements DocumentFactory<DocumentOperationSink>, DocumentRegistry<BlipDocument> {
    // Factory for conversational (blip) documents.
    private final DocumentFactory<BlipDocument> blipDocFactory;
    // Factory for every other (data) document.
    private final DocumentFactory<?> dataDocFactory;
    // serialized wavelet id -> (blip id -> blip document)
    private final StringMap<StringMap<BlipDocument>> blips = CollectionUtils.createStringMap();
    private WaveDocuments(DocumentFactory<BlipDocument> blip, DocumentFactory<?> data) {
        this.blipDocFactory = blip;
        this.dataDocFactory = data;
    }
    /** Static factory; keeps the constructor private. */
    public static <B extends DocumentOperationSink> WaveDocuments<B> create(
            DocumentFactory<B> blipDocFactory, DocumentFactory<?> dataDocFactory) {
        return new WaveDocuments<B>(blipDocFactory, dataDocFactory);
    }
    /**
     * Creates a document, dispatching on the id: blip ids go to the blip
     * factory and are registered; anything else goes to the data factory.
     * Fails if a blip document with the same id was already registered.
     */
    @Override
    public DocumentOperationSink create(
            final WaveletId waveletId, final String blipId, final DocInitialization content) {
        String waveletIdStr = ModernIdSerialiser.INSTANCE.serialiseWaveletId(waveletId);
        if (IdUtil.isBlipId(blipId)) {
            BlipDocument document = blipDocFactory.create(waveletId, blipId, content);
            StringMap<BlipDocument> convDocuments = getConversationDocuments(waveletIdStr);
            Preconditions.checkState(!convDocuments.containsKey(blipId));
            convDocuments.put(blipId, document);
            return document;
        } else {
            return dataDocFactory.create(waveletId, blipId, content);
        }
    }
    /** Returns (lazily creating) the per-wavelet blip-document index. */
    private StringMap<BlipDocument> getConversationDocuments(String id) {
        StringMap<BlipDocument> convDocuments = blips.get(id);
        if (convDocuments == null) {
            convDocuments = CollectionUtils.createStringMap();
            blips.put(id, convDocuments);
        }
        return convDocuments;
    }
    /** Looks up the registered document backing a conversation blip. */
    public BlipDocument get(ConversationBlip blip) {
        return getBlipDocument(blip.getConversation().getId(), blip.getId());
    }
    /** Returns the registered blip document, or null if none is known. */
    public BlipDocument getBlipDocument(String waveletId, String docId) {
        StringMap<BlipDocument> convDocuments = blips.get(waveletId);
        return convDocuments != null ? convDocuments.get(docId) : null;
    }
}
| {'content_hash': 'fc2563e5da880c7c22cfd770674d692e', 'timestamp': '', 'source': 'github', 'line_count': 65, 'max_line_length': 95, 'avg_line_length': 43.707692307692305, 'alnum_prop': 0.7388243576205561, 'repo_name': 'TinTin911/android-wave', 'id': '114776b8629d296d990d40788e0d1b07d6357ff1', 'size': '2841', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'app/src/main/java/app/android/box/waveprotocol/org/androidwave/service/documents/WaveDocuments.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '155740'}]} |
package parquet.io.api;
import java.io.DataOutput;
import java.io.IOException;
import java.io.ObjectStreamException;
import java.io.OutputStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import parquet.bytes.BytesUtils;
import parquet.io.ParquetEncodingException;
import static parquet.bytes.BytesUtils.UTF8;
/**
 * An immutable-by-contract wrapper around a sequence of bytes, with
 * several backing strategies (whole array, array slice, ByteBuffer).
 */
abstract public class Binary implements Comparable<Binary>, Serializable {
  // this isn't really something others should extend
  private Binary() { }
  /** Shared zero-length instance. */
  public static final Binary EMPTY = fromByteArray(new byte[0]);
  /** Decodes the backing bytes as UTF-8. */
  abstract public String toStringUsingUTF8();
  /** Number of bytes in this value. */
  abstract public int length();
  abstract public void writeTo(OutputStream out) throws IOException;
  abstract public void writeTo(DataOutput out) throws IOException;
  // NOTE: depending on the subclass this may return the live backing
  // array (ByteArrayBackedBinary) or a fresh copy (slice/buffer backed).
  abstract public byte[] getBytes();
  abstract boolean equals(byte[] bytes, int offset, int length);
  abstract boolean equals(Binary other);
  abstract public int compareTo(Binary other);
  abstract int compareTo(byte[] bytes, int offset, int length);
  abstract public ByteBuffer toByteBuffer();
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj instanceof Binary) {
return equals((Binary)obj);
}
return false;
}
@Override
public String toString() {
return "Binary{" + length() + " bytes, " + Arrays.toString(getBytes()) + "}";
}
  /**
   * Binary backed by a slice [offset, offset + length) of a shared byte
   * array.  The array is NOT copied; callers must not mutate it.
   */
  private static class ByteArraySliceBackedBinary extends Binary {
    private final byte[] value;
    private final int offset;
    private final int length;
    public ByteArraySliceBackedBinary(byte[] value, int offset, int length) {
      this.value = value;
      this.offset = offset;
      this.length = length;
    }
    @Override
    public String toStringUsingUTF8() {
      return UTF8.decode(ByteBuffer.wrap(value, offset, length)).toString();
      // TODO: figure out why the following line was much slower
      // rdb: new String(...) is slower because it instantiates a new Decoder,
      //      while Charset#decode uses a thread-local decoder cache
      // return new String(value, offset, length, BytesUtils.UTF8);
    }
    @Override
    public int length() {
      return length;
    }
    @Override
    public void writeTo(OutputStream out) throws IOException {
      out.write(value, offset, length);
    }
    @Override
    public byte[] getBytes() {
      // Defensive copy: only the slice is returned, never the raw array.
      return Arrays.copyOfRange(value, offset, offset + length);
    }
    @Override
    public int hashCode() {
      return Binary.hashCode(value, offset, length);
    }
    @Override
    boolean equals(Binary other) {
      // Double dispatch so the other instance can compare without copying.
      return other.equals(value, offset, length);
    }
    @Override
    boolean equals(byte[] other, int otherOffset, int otherLength) {
      return Binary.equals(value, offset, length, other, otherOffset, otherLength);
    }
    @Override
    public int compareTo(Binary other) {
      return other.compareTo(value, offset, length);
    }
    @Override
    int compareTo(byte[] other, int otherOffset, int otherLength) {
      return Binary.compareTwoByteArrays(value, offset, length, other, otherOffset, otherLength);
    }
    @Override
    public ByteBuffer toByteBuffer() {
      return ByteBuffer.wrap(value, offset, length);
    }
    @Override
    public void writeTo(DataOutput out) throws IOException {
      out.write(value, offset, length);
    }
  }
  /**
   * Array-backed Binary created from a String; only overrides toString()
   * to show the decoded text instead of raw bytes.
   */
  private static class FromStringBinary extends ByteArrayBackedBinary {
    public FromStringBinary(byte[] value) {
      super(value);
    }
    @Override
    public String toString() {
      return "Binary{\"" + toStringUsingUTF8() + "\"}";
    }
  }
  /**
   * Wraps a slice of the given array (no copy; callers must not mutate
   * the array afterwards).
   */
  public static Binary fromByteArray(final byte[] value, final int offset, final int length) {
    return new ByteArraySliceBackedBinary(value, offset, length);
  }
  /**
   * Binary backed by an entire byte array.  The array is NOT copied, and
   * getBytes() hands back the live array, so callers must not mutate it.
   */
  private static class ByteArrayBackedBinary extends Binary {
    private final byte[] value;
    public ByteArrayBackedBinary(byte[] value) {
      this.value = value;
    }
    @Override
    public String toStringUsingUTF8() {
      return UTF8.decode(ByteBuffer.wrap(value)).toString();
    }
    @Override
    public int length() {
      return value.length;
    }
    @Override
    public void writeTo(OutputStream out) throws IOException {
      out.write(value);
    }
    @Override
    public byte[] getBytes() {
      // Returns the backing array itself, not a copy.
      return value;
    }
    @Override
    public int hashCode() {
      return Binary.hashCode(value, 0, value.length);
    }
    @Override
    boolean equals(Binary other) {
      // Double dispatch so the other instance can compare without copying.
      return other.equals(value, 0, value.length);
    }
    @Override
    boolean equals(byte[] other, int otherOffset, int otherLength) {
      return Binary.equals(value, 0, value.length, other, otherOffset, otherLength);
    }
    @Override
    public int compareTo(Binary other) {
      return other.compareTo(value, 0, value.length);
    }
    @Override
    int compareTo(byte[] other, int otherOffset, int otherLength) {
      return Binary.compareTwoByteArrays(value, 0, value.length, other, otherOffset, otherLength);
    }
    @Override
    public ByteBuffer toByteBuffer() {
      return ByteBuffer.wrap(value);
    }
    @Override
    public void writeTo(DataOutput out) throws IOException {
      out.write(value);
    }
  }
// Factory for a Binary over a whole array. The array is NOT copied:
// callers must not mutate it after handing it over.
public static Binary fromByteArray(final byte[] value) {
return new ByteArrayBackedBinary(value);
}
/**
 * Binary backed by a ByteBuffer; the logical bytes are the buffer's
 * [position(), limit()) window. The field is transient because ByteBuffer is
 * not serializable — the custom writeObject/readObject below serialize the
 * materialized bytes instead.
 */
private static class ByteBufferBackedBinary extends Binary {
    private transient ByteBuffer value;

    public ByteBufferBackedBinary(ByteBuffer value) {
        // The buffer is NOT copied; callers must not move position/limit afterwards.
        this.value = value;
    }

    @Override
    public String toStringUsingUTF8() {
        // NOTE(review): decode() advances the buffer's position, so this looks
        // like a one-shot debug helper — confirm callers do not reuse the buffer.
        return UTF8.decode(value).toString();
    }

    @Override
    public int length() {
        return value.remaining();
    }

    @Override
    public void writeTo(OutputStream out) throws IOException {
        // TODO: should not have to materialize those bytes
        out.write(getBytes());
    }

    @Override
    public byte[] getBytes() {
        byte[] bytes = new byte[value.remaining()];
        // mark/reset restores the position after the bulk get.
        value.mark();
        value.get(bytes).reset();
        return bytes;
    }

    @Override
    public int hashCode() {
        if (value.hasArray()) {
            // Fast path on a heap buffer: the window starts at
            // arrayOffset() + position() and spans remaining() bytes.
            // Fix: the length argument was arrayOffset() + remaining(), which
            // over-reads whenever arrayOffset() != 0 (e.g. sliced buffers).
            return Binary.hashCode(value.array(), value.arrayOffset() + value.position(),
                value.remaining());
        }
        byte[] bytes = getBytes();
        return Binary.hashCode(bytes, 0, bytes.length);
    }

    @Override
    boolean equals(Binary other) {
        if (value.hasArray()) {
            // Fix: length is remaining(), not arrayOffset() + remaining().
            return other.equals(value.array(), value.arrayOffset() + value.position(),
                value.remaining());
        }
        byte[] bytes = getBytes();
        return other.equals(bytes, 0, bytes.length);
    }

    @Override
    boolean equals(byte[] other, int otherOffset, int otherLength) {
        if (value.hasArray()) {
            // Fix: length is remaining(), not arrayOffset() + remaining().
            return Binary.equals(value.array(), value.arrayOffset() + value.position(),
                value.remaining(), other, otherOffset, otherLength);
        }
        byte[] bytes = getBytes();
        return Binary.equals(bytes, 0, bytes.length, other, otherOffset, otherLength);
    }

    @Override
    public int compareTo(Binary other) {
        if (value.hasArray()) {
            // Fix: length is remaining(), not arrayOffset() + remaining().
            return other.compareTo(value.array(), value.arrayOffset() + value.position(),
                value.remaining());
        }
        byte[] bytes = getBytes();
        return other.compareTo(bytes, 0, bytes.length);
    }

    @Override
    int compareTo(byte[] other, int otherOffset, int otherLength) {
        if (value.hasArray()) {
            // Fix: length is remaining(), not arrayOffset() + remaining().
            return Binary.compareTwoByteArrays(value.array(), value.arrayOffset() + value.position(),
                value.remaining(), other, otherOffset, otherLength);
        }
        byte[] bytes = getBytes();
        return Binary.compareTwoByteArrays(bytes, 0, bytes.length, other, otherOffset, otherLength);
    }

    @Override
    public ByteBuffer toByteBuffer() {
        // Zero-copy: exposes the internal buffer (shared position/limit).
        return value;
    }

    @Override
    public void writeTo(DataOutput out) throws IOException {
        // TODO: should not have to materialize those bytes
        out.write(getBytes());
    }

    // Custom serialization: write length-prefixed raw bytes.
    private void writeObject(java.io.ObjectOutputStream out) throws IOException {
        byte[] bytes = getBytes();
        out.writeInt(bytes.length);
        out.write(bytes);
    }

    private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
        int length = in.readInt();
        byte[] bytes = new byte[length];
        in.readFully(bytes, 0, length);
        this.value = ByteBuffer.wrap(bytes);
    }

    private void readObjectNoData() throws ObjectStreamException {
        // Stream carried no data for this class: fall back to an empty Binary.
        this.value = ByteBuffer.wrap(new byte[0]);
    }
}
// Factory for a Binary viewing the buffer's [position(), limit()) window.
// The buffer is NOT copied; callers must not move its position afterwards.
public static Binary fromByteBuffer(final ByteBuffer value) {
return new ByteBufferBackedBinary(value);
}
// Factory encoding the String as UTF-8.
public static Binary fromString(final String value) {
try {
return new FromStringBinary(value.getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
// UTF-8 is mandatory for every JVM, so this is effectively unreachable.
throw new ParquetEncodingException("UTF-8 not supported.", e);
}
}
/**
 * Hash of the given array slice; matches {@link Arrays#hashCode(byte[])}
 * for the same sequence of bytes.
 *
 * @param array backing array; only [offset, offset + length) is read
 * @param offset index of the first byte included
 * @param length number of bytes to hash
 * @return the slice's hash value
 */
private static final int hashCode(byte[] array, int offset, int length) {
    int result = 1;
    final int end = offset + length;
    for (int i = offset; i < end; i++) {
        result = 31 * result + array[i];
    }
    return result;
}
/**
 * Slice-wise equality; matches {@link Arrays#equals(byte[], byte[])} for the
 * same byte windows.
 *
 * @param array1 first backing array (may be null)
 * @param offset1 start of the first window
 * @param length1 length of the first window
 * @param array2 second backing array (may be null)
 * @param offset2 start of the second window
 * @param length2 length of the second window
 * @return true when both windows hold the same bytes (or both arrays are null)
 */
private static final boolean equals(byte[] array1, int offset1, int length1, byte[] array2, int offset2, int length2) {
    if (array1 == null || array2 == null) {
        // Equal only when BOTH are null.
        return array1 == array2;
    }
    if (length1 != length2) {
        return false;
    }
    if (array1 == array2 && offset1 == offset2) {
        // Identical window of the identical array.
        return true;
    }
    for (int i = 0; i < length1; i++) {
        if (array1[offset1 + i] != array2[offset2 + i]) {
            return false;
        }
    }
    return true;
}
// Lexicographic comparison of two array slices using SIGNED byte ordering.
// CAUTION: the sign convention is INVERTED relative to the usual compareTo
// contract (returns 1 when array1 sorts BEFORE array2). The public
// Binary.compareTo(Binary) path swaps operands via double dispatch, so the
// two inversions cancel and callers observe conventional ordering — verify
// against any new direct caller before relying on the raw sign.
private static final int compareTwoByteArrays(byte[] array1, int offset1, int length1,
byte[] array2, int offset2, int length2) {
if (array1 == null && array2 == null) return 0;
// Same array, same window => trivially equal (note: nulls handled above).
if (array1 == array2 && offset1 == offset2 && length1 == length2) return 0;
int min_length = (length1 < length2) ? length1 : length2;
for (int i = 0; i < min_length; i++) {
if (array1[i + offset1] < array2[i + offset2]) {
return 1;
}
if (array1[i + offset1] > array2[i + offset2]) {
return -1;
}
}
// check remainder
// Common prefix equal: the shorter slice sorts first (inverted sign again).
if (length1 == length2) { return 0; }
else if (length1 < length2) { return 1;}
else { return -1; }
}
}
| {'content_hash': '40547eb837746b69ae6628dac82c1722', 'timestamp': '', 'source': 'github', 'line_count': 397, 'max_line_length': 121, 'avg_line_length': 27.49874055415617, 'alnum_prop': 0.6438582028029678, 'repo_name': 'dlanza1/parquet-mr', 'id': 'a297603ee55cbdf5e7762c013cfe854354e28a23', 'size': '11510', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'parquet-column/src/main/java/parquet/io/api/Binary.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'AspectJ', 'bytes': '9732'}, {'name': 'CSS', 'bytes': '5322'}, {'name': 'Java', 'bytes': '6039879'}, {'name': 'Makefile', 'bytes': '3317'}, {'name': 'Python', 'bytes': '26525'}, {'name': 'Scala', 'bytes': '5969'}, {'name': 'Shell', 'bytes': '55216'}, {'name': 'Thrift', 'bytes': '6482'}, {'name': 'XSLT', 'bytes': '14009'}]} |
layout: default
disqus: true
class: 'post-template'
---
<!-- < default -->
<!-- The comment above "< default" means - insert everything in this file into -->
<!-- the [body] of the default.hbs template, which contains our header/footer. -->
<!-- Everything inside the #post tags pulls data from the post -->
<!-- #post -->
<!-- NOTE: the class attribute below is deliberately closed INSIDE the Liquid
     branch: with a cover image the tag renders class="... post-head" plus a
     style with the background image; without one it renders
     class="... post-head no-cover" and no style attribute. -->
<header class="main-header post-head {% if page.cover %}" style="background-image: url({{ site.baseurl }}{{ page.cover }}) {% else %}no-cover{% endif %}">
<nav class="main-nav {% if page.cover %} overlay {% endif %} clearfix">
{% if page.logo %}<a class="blog-logo" href="{{ site.baseurl }}"><img src="{{ site.baseurl }}{{ page.logo }}" alt="Blog Logo" /></a>{% endif %}
{% if page.navigation %}
<a class="menu-button icon-menu" href="#"><span class="word">Menu</span></a>
{% endif %}
</nav>
</header>
<main class="content" role="main">
<article class="{{ page.subclass }}">
<header class="post-header">
<h1 class="post-title">{{ page.title }}</h1>
<section class="post-meta">
<!-- <a href='{{ site.baseurl }}{{ page.about }}'>{{ site.author }}</a> -->
<time class="post-date" datetime="{{ page.date | date:'%Y-%m-%d' }}">{{ page.date | date_to_string }}</time>
<!-- [[tags prefix=" on "]] -->
<!-- Tag list: every tag links to its tag page; the last one gets no trailing comma. -->
{% if page.tags.size > 0 %}
on
{% for tag in page.tags %}
{% if forloop.index == page.tags.size %}
<a href='{{ site.baseurl }}tag/{{ tag }}'>{{ tag | capitalize }}</a>
{% else %}
<a href='{{ site.baseurl }}tag/{{ tag }}'>{{ tag | capitalize }}</a>,
{% endif %}
{% endfor %}
{% endif %}
</section>
</header>
<section class="post-content">
{{ content }}
</section>
<footer class="post-footer">
<!-- Everything inside the #author tags pulls data from the author -->
<!-- #author-->
<!-- Author card: avatar, name, bio (with a fallback) and meta links, all from site config. -->
{% if site.image %}
<figure class="author-image">
<a class="img" href="{{ site.baseurl }}author/{{ site.nickname }}" style="background-image: url({{ site.baseurl }}{{ site.image }})"><span class="hidden">{{ page.author }}'s Picture</span></a>
</figure>
{% endif %}
<section class="author">
<h4><a href="{{ site.baseurl }}author/{{ site.nickname }}">{{ site.author }}</a></h4>
{% if site.bio %}
<p> {{ site.bio }}</p>
{% else %}
<p>Read <a href="{{ site.baseurl }}author/{{ site.nickname }}">more posts</a> by this author.</p>
{% endif %}
<div class="author-meta">
{% if site.location %}<span class="author-location icon-location"> {{ site.location }}</span>{% endif %}
{% if site.url %}<span class="author-link icon-link"><a href="{{ site.url }}"> {{ site.short_url }}</a></span>{% endif %}
</div>
</section>
<!-- /author -->
<!-- Share buttons open small popup windows via the inline onclick handlers. -->
<section class="share">
<h4>Share this post</h4>
<a class="icon-twitter" href="http://twitter.com/share?text={{ page.title }}&url={{ site.url }}{{ page.url | remove: '/' }}"
onclick="window.open(this.href, 'twitter-share', 'width=550,height=235');return false;">
<span class="hidden">Twitter</span>
</a>
<a class="icon-facebook" href="https://www.facebook.com/sharer/sharer.php?u={{ site.url }}{{ page.url | remove: '/' }}"
onclick="window.open(this.href, 'facebook-share','width=580,height=296');return false;">
<span class="hidden">Facebook</span>
</a>
<a class="icon-google-plus" href="https://plus.google.com/share?url={{ site.url }}{{ page.url | remove: '/' }}"
onclick="window.open(this.href, 'google-plus-share', 'width=490,height=530');return false;">
<span class="hidden">Google+</span>
</a>
</section>
<!-- Add Disqus Comments -->
{% include disqus.html %}
</footer>
</article>
</main>
<!-- Read-next aside: links to the chronologically next/previous posts.
     The class/style attribute trick mirrors the page header: the attribute
     quote inside the {% if %} branch closes class and opens style only when
     a cover image exists. -->
<aside class="read-next">
<!-- [[! next_post ]] -->
<!-- Fix: removed a stray leftover "{{ post.url | remove: '/' }}" here; `post`
     is undefined in a Jekyll layout, so it was dead Ghost-port residue. -->
{% if page.next %}
<a class="read-next-story {% if page.next.cover %}" style="background-image: url({{ site.baseurl }}{{ page.next.cover }}){% else %}no-cover{% endif %}" href="{{ site.baseurl }}{{ page.next.url | remove: '/' }}">
<section class="post">
<h2>{{ page.next.title }}</h2>
<p>{{ page.next.content | strip_html | truncatewords:15 }}</p>
</section>
</a>
{% endif %}
<!-- [[! /next_post ]] -->
<!-- [[! prev_post ]] -->
{% if page.previous %}
<a class="read-next-story prev {% if page.previous.cover %}" style="background-image: url({{ site.baseurl }}{{ page.previous.cover }}){% else %}no-cover{% endif %}" href="{{ site.baseurl }}{{ page.previous.url | remove: '/' }}">
<section class="post">
<h2>{{ page.previous.title }}</h2>
<p>{{ page.previous.content | strip_html | truncatewords:15 }}</p>
</section>
</a>
{% endif %}
<!-- [[! /prev_post ]] -->
</aside>
<!-- /post -->
| {'content_hash': '30d5d77b6ce1ed1eca5b59ff2da4cf53', 'timestamp': '', 'source': 'github', 'line_count': 128, 'max_line_length': 236, 'avg_line_length': 43.875, 'alnum_prop': 0.47596153846153844, 'repo_name': 'jeffersonRibeiro/blog', 'id': '747e971ec51c3f1a5fbbe9e3b0c9a4022749ca38', 'size': '5620', 'binary': False, 'copies': '1', 'ref': 'refs/heads/gh-pages', 'path': '_layouts/post.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '46735'}, {'name': 'HTML', 'bytes': '40450'}, {'name': 'JavaScript', 'bytes': '4424'}, {'name': 'Ruby', 'bytes': '9344'}]} |
package de.m0ep.uni.ma;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.camel.component.ActiveMQComponent;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.commons.io.FileUtils;
import org.ontoware.rdf2go.model.Model;
import org.ontoware.rdf2go.model.Syntax;
import org.ontoware.rdf2go.util.RDFTool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.m0ep.socc.camel.SoccComponent;
import de.m0ep.socc.core.SoccContext;
import de.m0ep.socc.core.utils.RdfUtils;
/**
 * Proof of concept bridging online communities with Apache Camel: messages
 * flow between a Canvas discussion topic and a Facebook group feed through
 * the custom SOCC Camel component, partly via an embedded ActiveMQ broker.
 * Runs until the user presses a key, then shuts down and dumps the RDF
 * model to stderr.
 */
public class ProofOfConcept {
    private static final Logger LOG = LoggerFactory.getLogger( ProofOfConcept.class );

    public static void main( String[] args ) {
        ProofOfConcept poc = new ProofOfConcept();
        poc.start();
    }

    private BrokerService activeMqBroker;
    private DefaultCamelContext camelContext;
    private Model model;

    /**
     * Boots model, Camel context and broker, installs the routes, then
     * blocks on stdin; any key triggers an orderly shutdown.
     */
    public void start() {
        initModel();
        initCamelContext();
        startActiveMqBroker();

        try {
            camelContext.addRoutes( routeBuilder );
            camelContext.start();
        } catch ( Exception e ) {
            LOG.error( "Failed to add routes and start CamelContext", e );
            System.exit( 1 );
        }

        try {
            // Block until the user presses a key, keeping the routes running.
            System.in.read();
        } catch ( IOException e ) {
            // Fix: previously this exception was silently swallowed; log it
            // and fall through to the orderly shutdown below.
            LOG.warn( "Reading from stdin failed, shutting down.", e );
        }

        try {
            camelContext.stop();
            activeMqBroker.stop();
        } catch ( Exception e ) {
            LOG.error( "Errors while shutting down", e );
        }

        // Dump the (possibly updated) config model for inspection.
        System.err.println( RdfUtils.modelToString( model, Syntax.Turtle ) );
    }

    /**
     * Loads the default RDF configuration model (Turtle) from the classpath.
     * Exits the process when the resource is missing or unreadable.
     */
    private void initModel() {
        File defaultModelFile = null;
        try {
            URL defaultModelUrl = getClass().getClassLoader().getResource( "poc_config_model.ttl" );
            defaultModelFile = new File( defaultModelUrl.toURI() );
        } catch ( URISyntaxException e ) {
            LOG.error( "Failed to get URI of default model file", e );
            System.exit( 1 );
        }

        try {
            String turtleContent = FileUtils.readFileToString( defaultModelFile );

            if ( LOG.isDebugEnabled() ) {
                LOG.debug( "Loaded default model content:\n{}", turtleContent );
            } else {
                LOG.info( "Loaded {} bytes of the default model content.", turtleContent.length() );
            }

            model = RDFTool.stringToModel( turtleContent, Syntax.Turtle );
            // System.err.println( RdfUtils.modelToString( model, Syntax.Turtle ) );
        } catch ( IOException e ) {
            LOG.error( "Failed to load default model", e );
            System.exit( 1 );
        }
    }

    // Route definitions; endpoint URIs identify concrete Canvas/Facebook resources.
    private final RouteBuilder routeBuilder = new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            // Route 1: with an ActiveMQ JMS Topic between the Canvas discussion topic
            // and Facebook group feed
            from( "socc://poc-canvas?uri=https://canvas.instructure.com/api/v1/"
                    + "courses/798152/discussion_topics/1540697"
                    + "&delay=20000" )
                    .to( "activemq:topic:canvas-topic" );

            from( "activemq:topic:canvas-topic" )
                    .to( "socc://poc-facebook"
                            + "?uri=https://graph.facebook.com/520312298060793_520417398050283" );

            // Route 2: direct route from facebook group feed to canvas discussion topic
            from( "socc://poc-facebook"
                    + "?uri=https://graph.facebook.com/520312298060793_520417398050283"
                    + "&delay=20000" )
                    .to( "socc://poc-canvas?uri=https://canvas.instructure.com/api/v1/"
                            + "courses/798152/discussion_topics/1540697" );

            // example routes for facebook, moodle and youtube to their own JMS topic
            //from( "socc://poc-facebook?uri=https://graph.facebook.com/"
            //        + "520312298060793_520417398050283"
            //        + "&delay=20000" )
            //        .to( "activemq:topic:facebook-topic" );
            //from( "socc://poc-moodle"
            //        + "?uri=http://localhost/moodle/mod/forum/discuss.php"
            //        + "?d=3&delay=10000" )
            //        .to( "activemq:topic:moodle-topic" );
            //from( "socc://poc-youtube"
            //        + "?uri=http://gdata.youtube.com/feeds/api/videos/80_b2-BA_Qg"
            //        + "&delay=10000" )
            //        .to( "activemq:topic:youtube-topic" );

            // log topic messages
            //from( "activemq:topic:youtube" )
            //        .to( "log://poc-log" );
        }
    };

    /**
     * Starts an embedded, non-persistent ActiveMQ broker on tcp://localhost:61616.
     * Exits the process when the broker cannot be started.
     */
    private void startActiveMqBroker() {
        LOG.info( "Starting ActiveMQ Broker..." );
        activeMqBroker = new BrokerService();
        activeMqBroker.setPersistent( false );

        try {
            activeMqBroker.addConnector( "tcp://localhost:61616" );
        } catch ( Exception e ) {
            LOG.error( "Failed to add Connector to ActiveMQ Broker", e );
        }

        try {
            activeMqBroker.start();
        } catch ( Exception e ) {
            LOG.error( "Failed to start ActiveMQ Broker", e );
            System.exit( 1 );
        }
    }

    /**
     * Creates the CamelContext and registers the "activemq" and "socc"
     * components. Exits the process when the SOCC component cannot be added.
     */
    private void initCamelContext() {
        LOG.info( "Starting CamelContext..." );
        camelContext = new DefaultCamelContext();
        camelContext.addComponent(
                "activemq",
                ActiveMQComponent.activeMQComponent( "tcp://localhost:61616" ) );

        try {
            camelContext.addComponent(
                    "socc",
                    new SoccComponent( camelContext, new SoccContext( model ) ) );
        } catch ( Exception e ) {
            LOG.error( "Failed to add SoccComponent to CamelContext.", e );
            System.exit( 1 );
        }
    }
}
| {'content_hash': 'a233ed90b3c27210c695e59dd131dc59', 'timestamp': '', 'source': 'github', 'line_count': 171, 'max_line_length': 91, 'avg_line_length': 30.339181286549707, 'alnum_prop': 0.6613338473400154, 'repo_name': 'm0ep/master-thesis', 'id': 'dec90fb3f81c34b6f2925156da3618ca1612c2a6', 'size': '5188', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'source/proof-of-concept/src/main/java/de/m0ep/uni/ma/ProofOfConcept.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Java', 'bytes': '3799026'}]} |
#include <srs_app_dvr.hpp>
#ifdef SRS_AUTO_DVR
#include <fcntl.h>
#include <sstream>
#include <algorithm>
using namespace std;
#include <srs_app_config.hpp>
#include <srs_rtmp_stack.hpp>
#include <srs_core_autofree.hpp>
#include <srs_kernel_utility.hpp>
#include <srs_app_http_hooks.hpp>
#include <srs_kernel_codec.hpp>
#include <srs_kernel_flv.hpp>
#include <srs_kernel_file.hpp>
#include <srs_protocol_amf0.hpp>
#include <srs_kernel_buffer.hpp>
#include <srs_protocol_json.hpp>
#include <srs_app_utility.hpp>
// update the flv duration and filesize every this interval in ms.
#define SRS_DVR_UPDATE_DURATION_INTERVAL 60000
// Construct an FLV segment owned by the given DVR plan.
// Members start in the "no stream seen yet" state; the real setup happens
// in initialize() and open().
SrsFlvSegment::SrsFlvSegment(SrsDvrPlan* p)
{
req = NULL;
jitter = NULL;
plan = p;
fs = new SrsFileWriter();
enc = new SrsFlvEncoder();
jitter_algorithm = SrsRtmpJitterAlgorithmOFF;
path = "";
has_keyframe = false;
duration = 0;
// -1 marks "not started yet" for timestamp trackers.
starttime = -1;
stream_starttime = 0;
stream_previous_pkt_time = -1;
stream_duration = 0;
// zero offsets mean "metadata positions unknown"; see write_metadata().
duration_offset = 0;
filesize_offset = 0;
// watch config reloads; see on_reload_vhost_dvr().
_srs_config->subscribe(this);
}
// Stop watching config reloads and release jitter/writer/encoder.
SrsFlvSegment::~SrsFlvSegment()
{
_srs_config->unsubscribe(this);
srs_freep(jitter);
srs_freep(fs);
srs_freep(enc);
}
// Bind the segment to the publish request and load the per-vhost time
// jitter algorithm. The request is owned by the caller and must outlive
// this segment.
int SrsFlvSegment::initialize(SrsRequest* r)
{
int ret = ERROR_SUCCESS;
req = r;
jitter_algorithm = (SrsRtmpJitterAlgorithm)_srs_config->get_dvr_time_jitter(req->vhost);
return ret;
}
// Whether the accumulated segment duration (ms) reached the given maximum.
bool SrsFlvSegment::is_overflow(int64_t max_duration)
{
return duration >= max_duration;
}
// Open (or re-open) the segment file for writing.
// When the target flv already exists we append to it in place; when it is
// fresh and use_tmp_file is set we write to "<path>.tmp" and rename in
// close(). NOTE(review): use_tmp_file presumably defaults to true in the
// header declaration -- confirm against srs_app_dvr.hpp.
int SrsFlvSegment::open(bool use_tmp_file)
{
int ret = ERROR_SUCCESS;
// ignore when already open.
if (fs->is_open()) {
return ret;
}
path = generate_path();
bool fresh_flv_file = !srs_path_exists(path);
// create dir first.
std::string dir = path.substr(0, path.rfind("/"));
if ((ret = srs_create_dir_recursively(dir)) != ERROR_SUCCESS) {
srs_error("create dir=%s failed. ret=%d", dir.c_str(), ret);
return ret;
}
srs_info("create dir=%s ok", dir.c_str());
// create jitter.
if ((ret = create_jitter(!fresh_flv_file)) != ERROR_SUCCESS) {
srs_error("create jitter failed, path=%s, fresh=%d. ret=%d", path.c_str(), fresh_flv_file, ret);
return ret;
}
// generate the tmp flv path.
if (!fresh_flv_file || !use_tmp_file) {
// when path exists, always append to it.
// so we must use the target flv path as output flv.
tmp_flv_file = path;
} else {
// when path not exists, dvr to tmp file.
tmp_flv_file = path + ".tmp";
}
// open file writer, in append or create mode.
if (!fresh_flv_file) {
if ((ret = fs->open_append(tmp_flv_file)) != ERROR_SUCCESS) {
srs_error("append file stream for file %s failed. ret=%d", path.c_str(), ret);
return ret;
}
srs_trace("dvr: always append to when exists, file=%s.", path.c_str());
} else {
if ((ret = fs->open(tmp_flv_file)) != ERROR_SUCCESS) {
srs_error("open file stream for file %s failed. ret=%d", path.c_str(), ret);
return ret;
}
}
// initialize the encoder.
if ((ret = enc->initialize(fs)) != ERROR_SUCCESS) {
srs_error("initialize enc by fs for file %s failed. ret=%d", path.c_str(), ret);
return ret;
}
// when exists, donot write flv header.
if (fresh_flv_file) {
// write the flv header to writer.
if ((ret = enc->write_header()) != ERROR_SUCCESS) {
srs_error("write flv header failed. ret=%d", ret);
return ret;
}
}
// update the duration and filesize offset.
// reset so write_metadata() records fresh patch positions for this file.
duration_offset = 0;
filesize_offset = 0;
srs_trace("dvr stream %s to file %s", req->stream.c_str(), path.c_str());
return ret;
}
// Close the segment: patch final duration/filesize into the metadata tag,
// close the file, rename the tmp file to its final path, and notify the
// plan that a segment was reaped.
int SrsFlvSegment::close()
{
int ret = ERROR_SUCCESS;
// ignore when already closed.
if (!fs->is_open()) {
return ret;
}
// update duration and filesize.
if ((ret = update_flv_metadata()) != ERROR_SUCCESS) {
return ret;
}
fs->close();
// when tmp flv file exists, reap it.
if (tmp_flv_file != path) {
if (rename(tmp_flv_file.c_str(), path.c_str()) < 0) {
ret = ERROR_SYSTEM_FILE_RENAME;
srs_error("rename flv file failed, %s => %s. ret=%d",
tmp_flv_file.c_str(), path.c_str(), ret);
return ret;
}
}
// TODO: FIXME: the http callback is async, which will trigger thread switch,
// so the on_video maybe invoked during the http callback, and error.
if ((ret = plan->on_reap_segment()) != ERROR_SUCCESS) {
srs_error("dvr: notify plan to reap segment failed. ret=%d", ret);
return ret;
}
return ret;
}
// Write the onMetaData script tag to the flv, recording the in-file offsets
// of the "duration" and "filesize" number values so update_flv_metadata()
// can patch them in place when the segment closes.
int SrsFlvSegment::write_metadata(SrsSharedPtrMessage* metadata)
{
int ret = ERROR_SUCCESS;
// non-zero offsets mean metadata was already written; write it only once.
if (duration_offset || filesize_offset) {
return ret;
}
SrsBuffer stream;
if ((ret = stream.initialize(metadata->payload, metadata->size)) != ERROR_SUCCESS) {
return ret;
}
// parse the incoming AMF0 metadata: name string + properties object.
SrsAmf0Any* name = SrsAmf0Any::str();
SrsAutoFree(SrsAmf0Any, name);
if ((ret = name->read(&stream)) != ERROR_SUCCESS) {
return ret;
}
SrsAmf0Object* obj = SrsAmf0Any::object();
SrsAutoFree(SrsAmf0Object, obj);
if ((ret = obj->read(&stream)) != ERROR_SUCCESS) {
return ret;
}
// remove duration and filesize.
obj->set("filesize", NULL);
obj->set("duration", NULL);
// add properties.
// re-added as zero placeholders at a known position (end of the object),
// which is what makes the offset math below valid.
obj->set("service", SrsAmf0Any::str(RTMP_SIG_SRS_SERVER));
obj->set("filesize", SrsAmf0Any::number(0));
obj->set("duration", SrsAmf0Any::number(0));
int size = name->total_size() + obj->total_size();
char* payload = new char[size];
SrsAutoFree(char, payload);
// 11B flv header, 3B object EOF, 8B number value, 1B number flag.
duration_offset = fs->tellg() + size + 11 - SrsAmf0Size::object_eof() - SrsAmf0Size::number();
// 2B string flag, 8B number value, 8B string 'duration', 1B number flag
filesize_offset = duration_offset - SrsAmf0Size::utf8("duration") - SrsAmf0Size::number();
// convert metadata to bytes.
if ((ret = stream.initialize(payload, size)) != ERROR_SUCCESS) {
return ret;
}
if ((ret = name->write(&stream)) != ERROR_SUCCESS) {
return ret;
}
if ((ret = obj->write(&stream)) != ERROR_SUCCESS) {
return ret;
}
// to flv file.
// 18 is the FLV script-data (onMetaData) tag type per the FLV spec.
if ((ret = enc->write_metadata(18, payload, size)) != ERROR_SUCCESS) {
return ret;
}
return ret;
}
// Write one audio message into the segment.
// The shared message is copied so jitter correction never mutates the
// caller's copy; ownership of shared_audio is not taken.
// @return ERROR_SUCCESS, or the first failing step's error code.
int SrsFlvSegment::write_audio(SrsSharedPtrMessage* shared_audio)
{
    int ret = ERROR_SUCCESS;
    
    SrsSharedPtrMessage* audio = shared_audio->copy();
    SrsAutoFree(SrsSharedPtrMessage, audio);
    
    // Fix: the result of correct() was previously discarded (compared but not
    // assigned to ret), so a jitter failure silently returned success.
    if ((ret = jitter->correct(audio, jitter_algorithm)) != ERROR_SUCCESS) {
        return ret;
    }
    
    char* payload = audio->payload;
    int size = audio->size;
    // let the plan (e.g. append plan) remap the timestamp before encoding.
    int64_t timestamp = plan->filter_timestamp(audio->timestamp);
    if ((ret = enc->write_audio(timestamp, payload, size)) != ERROR_SUCCESS) {
        return ret;
    }
    
    // account this message into segment/stream duration bookkeeping.
    if ((ret = on_update_duration(audio)) != ERROR_SUCCESS) {
        return ret;
    }
    
    return ret;
}
// Write one video message into the segment.
// Sequence headers are always accepted; other frames are dropped until the
// first keyframe when dvr_wait_keyframe is configured. The shared message is
// copied so jitter correction never mutates the caller's copy.
// @return ERROR_SUCCESS, or the first failing step's error code.
int SrsFlvSegment::write_video(SrsSharedPtrMessage* shared_video)
{
    int ret = ERROR_SUCCESS;
    
    SrsSharedPtrMessage* video = shared_video->copy();
    SrsAutoFree(SrsSharedPtrMessage, video);
    
    char* payload = video->payload;
    int size = video->size;
    
    bool is_sequence_header = SrsFlvCodec::video_is_sequence_header(payload, size);
#ifdef SRS_AUTO_HTTP_CALLBACK
    // a "key frame" here excludes the sequence header on purpose.
    bool is_key_frame = SrsFlvCodec::video_is_h264(payload, size)
        && SrsFlvCodec::video_is_keyframe(payload, size) && !is_sequence_header;
    if (is_key_frame) {
        has_keyframe = true;
        if ((ret = plan->on_video_keyframe()) != ERROR_SUCCESS) {
            return ret;
        }
    }
    srs_verbose("dvr video is key: %d", is_key_frame);
#endif
    
    // accept the sequence header here.
    // when got no keyframe, ignore when should wait keyframe.
    if (!has_keyframe && !is_sequence_header) {
        bool wait_keyframe = _srs_config->get_dvr_wait_keyframe(req->vhost);
        if (wait_keyframe) {
            srs_info("dvr: ignore when wait keyframe.");
            return ret;
        }
    }
    
    // Fix: the result of correct() was previously discarded (compared but not
    // assigned to ret), so a jitter failure silently returned success.
    if ((ret = jitter->correct(video, jitter_algorithm)) != ERROR_SUCCESS) {
        return ret;
    }
    
    // update segment duration, session plan just update the duration,
    // the segment plan will reap segment if exceed, this video will write to next segment.
    if ((ret = on_update_duration(video)) != ERROR_SUCCESS) {
        return ret;
    }
    
    int32_t timestamp = (int32_t)plan->filter_timestamp(video->timestamp);
    if ((ret = enc->write_video(timestamp, payload, size)) != ERROR_SUCCESS) {
        return ret;
    }
    
    return ret;
}
// Patch the real filesize and duration (seconds) into the metadata tag at
// the offsets recorded by write_metadata(), then restore the file position.
int SrsFlvSegment::update_flv_metadata()
{
int ret = ERROR_SUCCESS;
// no duration or filesize specified.
if (!duration_offset || !filesize_offset) {
return ret;
}
// remember the current write position; the filesize equals it.
int64_t cur = fs->tellg();
// buffer to write the size.
char* buf = new char[SrsAmf0Size::number()];
SrsAutoFree(char, buf);
SrsBuffer stream;
if ((ret = stream.initialize(buf, SrsAmf0Size::number())) != ERROR_SUCCESS) {
return ret;
}
// filesize to buf.
SrsAmf0Any* size = SrsAmf0Any::number((double)cur);
SrsAutoFree(SrsAmf0Any, size);
stream.skip(-1 * stream.pos());
if ((ret = size->write(&stream)) != ERROR_SUCCESS) {
return ret;
}
// update the filesize.
fs->lseek(filesize_offset);
if ((ret = fs->write(buf, SrsAmf0Size::number(), NULL)) != ERROR_SUCCESS) {
return ret;
}
// duration to buf
// duration is kept in ms; the metadata field is in seconds.
SrsAmf0Any* dur = SrsAmf0Any::number((double)duration / 1000.0);
SrsAutoFree(SrsAmf0Any, dur);
stream.skip(-1 * stream.pos());
if ((ret = dur->write(&stream)) != ERROR_SUCCESS) {
return ret;
}
// update the duration
fs->lseek(duration_offset);
if ((ret = fs->write(buf, SrsAmf0Size::number(), NULL)) != ERROR_SUCCESS) {
return ret;
}
// reset the offset.
fs->lseek(cur);
return ret;
}
// Absolute path of the current flv file (valid after open()).
string SrsFlvSegment::get_path()
{
return path;
}
// Build the flv path from the configured template, substituting stream
// variables ([vhost]/[app]/[stream]) and timestamp variables.
string SrsFlvSegment::generate_path()
{
// the path in config, for example,
// /data/[vhost]/[app]/[stream]/[2006]/[01]/[02]/[15].[04].[05].[999].flv
std::string path_config = _srs_config->get_dvr_path(req->vhost);
// add [stream].[timestamp].flv as filename for dir
// i.e. when the configured path does not already end with ".flv".
if (path_config.find(".flv") != path_config.length() - 4) {
path_config += "/[stream].[timestamp].flv";
}
// the flv file path
std::string flv_path = path_config;
flv_path = srs_path_build_stream(flv_path, req->vhost, req->app, req->stream);
flv_path = srs_path_build_timestamp(flv_path);
return flv_path;
}
// Create or reuse the time jitter for this segment.
// @param loads_from_flv true when appending to an already-existing flv file.
int SrsFlvSegment::create_jitter(bool loads_from_flv)
{
int ret = ERROR_SUCCESS;
// when path exists, use exists jitter.
if (!loads_from_flv) {
// jitter when publish, ensure whole stream start from 0.
srs_freep(jitter);
jitter = new SrsRtmpJitter();
// fresh stream starting.
starttime = -1;
stream_previous_pkt_time = -1;
stream_starttime = srs_update_system_time_ms();
stream_duration = 0;
// fresh segment starting.
has_keyframe = false;
duration = 0;
return ret;
}
// when jitter ok, do nothing.
if (jitter) {
return ret;
}
// always ensure the jitter created.
// for the first time, initialize jitter from exists file.
jitter = new SrsRtmpJitter();
// TODO: FIXME: implements it.
// NOTE(review): loading the jitter state from the existing flv is not yet
// implemented, so appended streams start with a fresh jitter.
return ret;
}
// Accumulate segment and whole-stream duration (ms) from a message timestamp.
int SrsFlvSegment::on_update_duration(SrsSharedPtrMessage* msg)
{
int ret = ERROR_SUCCESS;
// we must assume that the stream timestamp monotonically increases,
// that is, always use time jitter to correct the timestamp.
// except the time jitter is disabled in config.
// set the segment starttime at first time
if (starttime < 0) {
starttime = msg->timestamp;
}
// no previous packet or timestamp overflow.
if (stream_previous_pkt_time < 0 || stream_previous_pkt_time > msg->timestamp) {
stream_previous_pkt_time = msg->timestamp;
}
// collect segment and stream duration, timestamp overflow is ok.
duration += msg->timestamp - stream_previous_pkt_time;
stream_duration += msg->timestamp - stream_previous_pkt_time;
// update previous packet time
stream_previous_pkt_time = msg->timestamp;
return ret;
}
// Config-reload hook: refresh the per-vhost jitter algorithm.
int SrsFlvSegment::on_reload_vhost_dvr(std::string /*vhost*/)
{
int ret = ERROR_SUCCESS;
jitter_algorithm = (SrsRtmpJitterAlgorithm)_srs_config->get_dvr_time_jitter(req->vhost);
return ret;
}
// Capture the connection id, a private copy of the request, and the finished
// file path for the async on_dvr HTTP callback.
SrsDvrAsyncCallOnDvr::SrsDvrAsyncCallOnDvr(int c, SrsRequest* r, string p)
{
cid = c;
// copy: the original request may be freed before the async call runs.
req = r->copy();
path = p;
}
SrsDvrAsyncCallOnDvr::~SrsDvrAsyncCallOnDvr()
{
srs_freep(req);
}
// Invoke every configured on_dvr HTTP hook for the vhost; aborts on the
// first failing hook. No-op unless HTTP callbacks are compiled in and
// enabled for the vhost.
int SrsDvrAsyncCallOnDvr::call()
{
int ret = ERROR_SUCCESS;
#ifdef SRS_AUTO_HTTP_CALLBACK
if (!_srs_config->get_vhost_http_hooks_enabled(req->vhost)) {
return ret;
}
// the http hooks will cause context switch,
// so we must copy all hooks for the on_connect may freed.
// @see https://github.com/simple-rtmp-server/srs/issues/475
vector<string> hooks;
if (true) {
SrsConfDirective* conf = _srs_config->get_vhost_on_dvr(req->vhost);
if (!conf) {
srs_info("ignore the empty http callback: on_dvr");
return ret;
}
hooks = conf->args;
}
for (int i = 0; i < (int)hooks.size(); i++) {
std::string url = hooks.at(i);
if ((ret = SrsHttpHooks::on_dvr(cid, url, req, path)) != ERROR_SUCCESS) {
srs_error("hook client on_dvr failed. url=%s, ret=%d", url.c_str(), ret);
return ret;
}
}
#endif
return ret;
}
// Human-readable description of this async task, used for logging.
string SrsDvrAsyncCallOnDvr::to_string()
{
    std::string desc = "vhost=";
    desc += req->vhost;
    desc += ", file=";
    desc += path;
    return desc;
}
// Base DVR plan: owns the flv segment and the async worker used to fire
// HTTP callbacks without blocking the media path.
SrsDvrPlan::SrsDvrPlan()
{
req = NULL;
dvr_enabled = false;
segment = new SrsFlvSegment(this);
async = new SrsAsyncCallWorker();
}
SrsDvrPlan::~SrsDvrPlan()
{
srs_freep(segment);
srs_freep(async);
}
// Bind the plan to the publish request, initialize the segment and start
// the async callback worker. The request must outlive the plan.
int SrsDvrPlan::initialize(SrsRequest* r)
{
int ret = ERROR_SUCCESS;
req = r;
if ((ret = segment->initialize(r)) != ERROR_SUCCESS) {
return ret;
}
if ((ret = async->start()) != ERROR_SUCCESS) {
return ret;
}
return ret;
}
// Hook invoked for each video keyframe; the base plan does nothing.
int SrsDvrPlan::on_video_keyframe()
{
return ERROR_SUCCESS;
}
// Timestamp filter hook; the base plan passes timestamps through unchanged.
int64_t SrsDvrPlan::filter_timestamp(int64_t timestamp)
{
return timestamp;
}
// Forward stream metadata to the segment, but only while recording.
int SrsDvrPlan::on_meta_data(SrsSharedPtrMessage* shared_metadata)
{
int ret = ERROR_SUCCESS;
if (!dvr_enabled) {
return ret;
}
return segment->write_metadata(shared_metadata);
}
// Forward one audio message into the current segment.
// A silent success whenever DVR is not currently recording.
int SrsDvrPlan::on_audio(SrsSharedPtrMessage* shared_audio)
{
    if (!dvr_enabled) {
        return ERROR_SUCCESS;
    }
    // propagate the segment writer's result directly.
    return segment->write_audio(shared_audio);
}
// Forward one video message into the current segment.
// A silent success whenever DVR is not currently recording.
int SrsDvrPlan::on_video(SrsSharedPtrMessage* shared_video)
{
    if (!dvr_enabled) {
        return ERROR_SUCCESS;
    }
    // propagate the segment writer's result directly.
    return segment->write_video(shared_video);
}
// A segment file was finished: queue the async on_dvr HTTP callback with
// the current context id, the request and the finished file path.
int SrsDvrPlan::on_reap_segment()
{
int ret = ERROR_SUCCESS;
int cid = _srs_context->get_id();
// the worker takes ownership of the task object.
if ((ret = async->execute(new SrsDvrAsyncCallOnDvr(cid, req, segment->get_path()))) != ERROR_SUCCESS) {
return ret;
}
return ret;
}
// Factory: create the DVR plan configured for the vhost
// (segment / session / append). The caller takes ownership.
SrsDvrPlan* SrsDvrPlan::create_plan(string vhost)
{
    std::string plan = _srs_config->get_dvr_plan(vhost);
    if (srs_config_dvr_is_plan_segment(plan)) {
        return new SrsDvrSegmentPlan();
    } else if (srs_config_dvr_is_plan_session(plan)) {
        return new SrsDvrSessionPlan();
    } else if (srs_config_dvr_is_plan_append(plan)) {
        return new SrsDvrAppendPlan();
    } else {
        srs_error("invalid dvr plan=%s, vhost=%s", plan.c_str(), vhost.c_str());
        srs_assert(false);
        // Fix: without this return, an NDEBUG build (assert compiled out)
        // would fall off the end of a value-returning function, which is
        // undefined behavior.
        return NULL;
    }
}
// Session plan: record one flv file per publish session.
SrsDvrSessionPlan::SrsDvrSessionPlan()
{
}
SrsDvrSessionPlan::~SrsDvrSessionPlan()
{
}
// Start recording when the vhost has dvr enabled; close any segment left
// from a previous session before opening the new one.
int SrsDvrSessionPlan::on_publish()
{
int ret = ERROR_SUCCESS;
// support multiple publish.
if (dvr_enabled) {
return ret;
}
if (!_srs_config->get_dvr_enabled(req->vhost)) {
return ret;
}
if ((ret = segment->close()) != ERROR_SUCCESS) {
return ret;
}
if ((ret = segment->open()) != ERROR_SUCCESS) {
return ret;
}
dvr_enabled = true;
return ret;
}
// Stop recording: close the segment. Close errors are only warned about,
// because unpublish must never fail.
void SrsDvrSessionPlan::on_unpublish()
{
// support multiple publish.
if (!dvr_enabled) {
return;
}
// ignore error.
int ret = segment->close();
if (ret != ERROR_SUCCESS) {
srs_warn("ignore flv close error. ret=%d", ret);
}
dvr_enabled = false;
}
// Append plan: keep appending to the same flv file across publish sessions.
SrsDvrAppendPlan::SrsDvrAppendPlan()
{
last_update_time = 0;
}
SrsDvrAppendPlan::~SrsDvrAppendPlan()
{
}
// Start (or resume) recording; open(false) writes directly to the final
// path with no tmp file, so the existing flv is appended in place.
int SrsDvrAppendPlan::on_publish()
{
int ret = ERROR_SUCCESS;
// support multiple publish.
if (dvr_enabled) {
return ret;
}
if (!_srs_config->get_dvr_enabled(req->vhost)) {
return ret;
}
if ((ret = segment->open(false)) != ERROR_SUCCESS) {
return ret;
}
dvr_enabled = true;
return ret;
}
// Intentionally empty: the append plan keeps the segment open so the next
// publish continues the same recording.
void SrsDvrAppendPlan::on_unpublish()
{
}
// Audio path of the append plan: refresh the in-file metadata bookkeeping
// first, then let the base plan write the message to the segment.
int SrsDvrAppendPlan::on_audio(SrsSharedPtrMessage* shared_audio)
{
    int err = update_duration(shared_audio);
    if (err != ERROR_SUCCESS) {
        return err;
    }
    return SrsDvrPlan::on_audio(shared_audio);
}
// Video path of the append plan: refresh the in-file metadata bookkeeping
// first, then let the base plan write the message to the segment.
int SrsDvrAppendPlan::on_video(SrsSharedPtrMessage* shared_video)
{
    int err = update_duration(shared_video);
    if (err != ERROR_SUCCESS) {
        return err;
    }
    return SrsDvrPlan::on_video(shared_video);
}
// Track the stream timestamp and periodically refresh the flv metadata
// so the recorded file's duration stays roughly current.
int SrsDvrAppendPlan::update_duration(SrsSharedPtrMessage* msg)
{
    int ret = ERROR_SUCCESS;
    // first message seen: just remember its timestamp.
    if (last_update_time <= 0) {
        last_update_time = msg->timestamp;
        return ret;
    }
    // timestamp jumped backwards (stream reset): restart tracking.
    if (msg->timestamp < last_update_time) {
        last_update_time = msg->timestamp;
        return ret;
    }
    // throttle: only refresh once per SRS_DVR_UPDATE_DURATION_INTERVAL.
    if (SRS_DVR_UPDATE_DURATION_INTERVAL > msg->timestamp - last_update_time) {
        return ret;
    }
    last_update_time = msg->timestamp;
    srs_assert(segment);
    // NOTE(review): both branches below return ret unchanged, so the
    // result of update_flv_metadata() is effectively ignored — presumably
    // best-effort, but confirm whether its return value is an error code
    // that should be propagated.
    if (!segment->update_flv_metadata()) {
        return ret;
    }
    return ret;
}
SrsDvrSegmentPlan::SrsDvrSegmentPlan()
{
    // -1 means "not configured yet"; set from config in initialize().
    segment_duration = -1;
    // cached metadata and sequence headers, replayed when a new segment
    // opens (see update_duration).
    metadata = sh_video = sh_audio = NULL;
}
// Release the cached metadata and sequence-header messages.
SrsDvrSegmentPlan::~SrsDvrSegmentPlan()
{
    srs_freep(sh_video);
    srs_freep(sh_audio);
    srs_freep(metadata);
}
// Initialize the segment plan: base initialization plus the per-vhost
// segment duration (stored internally in milliseconds).
int SrsDvrSegmentPlan::initialize(SrsRequest* req)
{
    int ret = SrsDvrPlan::initialize(req);
    if (ret != ERROR_SUCCESS) {
        return ret;
    }

    // config value converted to ms (presumably configured in seconds —
    // confirm against get_dvr_duration).
    segment_duration = _srs_config->get_dvr_duration(req->vhost) * 1000;
    return ret;
}
// Start recording for the segment plan: reap any leftover file and open
// a fresh segment. Safe to call repeatedly (supports multiple publish).
int SrsDvrSegmentPlan::on_publish()
{
    int ret = ERROR_SUCCESS;

    // already recording, or dvr disabled for this vhost: nothing to do.
    if (dvr_enabled || !_srs_config->get_dvr_enabled(req->vhost)) {
        return ret;
    }

    // close any previous segment before opening a new one.
    if ((ret = segment->close()) != ERROR_SUCCESS) {
        return ret;
    }
    if ((ret = segment->open()) != ERROR_SUCCESS) {
        return ret;
    }

    dvr_enabled = true;
    return ret;
}
// Intentionally empty: segments are reaped by update_duration when they
// overflow — presumably the last open segment is closed elsewhere;
// confirm against the segment's lifecycle.
void SrsDvrSegmentPlan::on_unpublish()
{
}
// Cache the newest metadata (it is replayed when a segment rolls over,
// see update_duration), then forward it to the base plan for writing.
int SrsDvrSegmentPlan::on_meta_data(SrsSharedPtrMessage* shared_metadata)
{
    // keep our own copy for replay when a new segment opens.
    srs_freep(metadata);
    metadata = shared_metadata->copy();

    return SrsDvrPlan::on_meta_data(shared_metadata);
}
// Record an audio message. Sequence headers are cached so they can be
// replayed at the next segment boundary (see update_duration).
int SrsDvrSegmentPlan::on_audio(SrsSharedPtrMessage* shared_audio)
{
    if (SrsFlvCodec::audio_is_sequence_header(shared_audio->payload, shared_audio->size)) {
        // remember the latest audio sequence header.
        srs_freep(sh_audio);
        sh_audio = shared_audio->copy();
    }

    int ret = update_duration(shared_audio);
    if (ret != ERROR_SUCCESS) {
        return ret;
    }
    return SrsDvrPlan::on_audio(shared_audio);
}
// Record a video message. Sequence headers are cached so they can be
// replayed at the next segment boundary (see update_duration).
int SrsDvrSegmentPlan::on_video(SrsSharedPtrMessage* shared_video)
{
    if (SrsFlvCodec::video_is_sequence_header(shared_video->payload, shared_video->size)) {
        // remember the latest video sequence header.
        srs_freep(sh_video);
        sh_video = shared_video->copy();
    }

    int ret = update_duration(shared_video);
    if (ret != ERROR_SUCCESS) {
        return ret;
    }
    return SrsDvrPlan::on_video(shared_video);
}
// Roll the segment when it exceeds the configured duration: close the
// current file, open a new one, and replay the cached metadata and
// sequence headers so the new file is self-contained.
int SrsDvrSegmentPlan::update_duration(SrsSharedPtrMessage* msg)
{
    int ret = ERROR_SUCCESS;
    srs_assert(segment);
    // ignore if duration ok.
    if (segment_duration <= 0 || !segment->is_overflow(segment_duration)) {
        return ret;
    }
    // when wait keyframe, ignore if no frame arrived.
    // @see https://github.com/simple-rtmp-server/srs/issues/177
    if (_srs_config->get_dvr_wait_keyframe(req->vhost)) {
        if (!msg->is_video()) {
            return ret;
        }
        char* payload = msg->payload;
        int size = msg->size;
        // only cut on a real h.264 keyframe — a sequence header is not a
        // picture, so it must not trigger the cut.
        bool is_key_frame = SrsFlvCodec::video_is_h264(payload, size)
            && SrsFlvCodec::video_is_keyframe(payload, size)
            && !SrsFlvCodec::video_is_sequence_header(payload, size);
        if (!is_key_frame) {
            return ret;
        }
    }
    // reap segment
    if ((ret = segment->close()) != ERROR_SUCCESS) {
        return ret;
    }
    // open new flv file
    if ((ret = segment->open()) != ERROR_SUCCESS) {
        return ret;
    }
    // update sequence header: replay cached metadata, then video/audio
    // sequence headers, into the freshly opened segment.
    if (metadata && (ret = SrsDvrPlan::on_meta_data(metadata)) != ERROR_SUCCESS) {
        return ret;
    }
    if (sh_video && (ret = SrsDvrPlan::on_video(sh_video)) != ERROR_SUCCESS) {
        return ret;
    }
    if (sh_audio && (ret = SrsDvrPlan::on_audio(sh_audio)) != ERROR_SUCCESS) {
        return ret;
    }
    return ret;
}
// Construct an unbound dvr and subscribe to config reload notifications
// (see on_reload_vhost_dvr_apply).
SrsDvr::SrsDvr()
{
    source = NULL;
    plan = NULL;
    req = NULL;
    actived = false;
    _srs_config->subscribe(this);
}
// Unsubscribe from config reloads and release the owned plan.
SrsDvr::~SrsDvr()
{
    _srs_config->unsubscribe(this);
    srs_freep(plan);
}
// Bind the dvr to a source/request: evaluate the dvr_apply filter that
// decides whether this stream records, then (re)create and initialize
// the plan configured for the vhost.
int SrsDvr::initialize(SrsSource* s, SrsRequest* r)
{
    req = r;
    source = s;

    // does the dvr_apply filter select this stream?
    SrsConfDirective* conf = _srs_config->get_dvr_apply(r->vhost);
    actived = srs_config_apply_filter(conf, r);

    // replace any previous plan with a freshly configured one.
    srs_freep(plan);
    plan = SrsDvrPlan::create_plan(r->vhost);
    return plan->initialize(r);
}
// Start recording via the plan. When fetch_sequence_header is set, also
// ask the source to re-send the sequence headers — presumably so the new
// recording starts with them; confirm against on_dvr_request_sh.
int SrsDvr::on_publish(bool fetch_sequence_header)
{
    int ret = ERROR_SUCCESS;

    // the dvr for this stream is not actived.
    if (!actived) {
        return ret;
    }

    if ((ret = plan->on_publish()) != ERROR_SUCCESS) {
        return ret;
    }

    if (fetch_sequence_header) {
        ret = source->on_dvr_request_sh();
    }
    return ret;
}
// Stop recording by delegating to the plan.
// NOTE(review): unlike on_publish/on_audio/on_video this does not check
// `actived`; the plans' on_unpublish appears to be a safe no-op when not
// recording (they guard on dvr_enabled or are empty) — confirm.
void SrsDvr::on_unpublish()
{
    plan->on_unpublish();
}
// TODO: FIXME: source should use shared message instead.
// Encode the metadata packet to a raw payload and hand it to the plan,
// wrapped in a stack-owned shared message.
int SrsDvr::on_meta_data(SrsOnMetaDataPacket* m)
{
    int ret = ERROR_SUCCESS;
    // the dvr for this stream is not actived.
    if (!actived) {
        return ret;
    }
    // encode() allocates the payload buffer.
    int size = 0;
    char* payload = NULL;
    if ((ret = m->encode(size, payload)) != ERROR_SUCCESS) {
        return ret;
    }
    // NOTE(review): if create() fails, ownership of `payload` is unclear
    // from here — if create() only takes ownership on success, this path
    // leaks the encoded buffer; verify against SrsSharedPtrMessage.
    SrsSharedPtrMessage metadata;
    if ((ret = metadata.create(NULL, payload, size)) != ERROR_SUCCESS) {
        return ret;
    }
    if ((ret = plan->on_meta_data(&metadata)) != ERROR_SUCCESS) {
        return ret;
    }
    return ret;
}
// Forward an audio message to the plan; no-op when the dvr is not
// actived for this stream.
int SrsDvr::on_audio(SrsSharedPtrMessage* shared_audio)
{
    // the dvr for this stream is not actived.
    if (!actived) {
        return ERROR_SUCCESS;
    }
    return plan->on_audio(shared_audio);
}
// Forward a video message to the plan; no-op when the dvr is not
// actived for this stream.
int SrsDvr::on_video(SrsSharedPtrMessage* shared_video)
{
    // the dvr for this stream is not actived.
    if (!actived) {
        return ERROR_SUCCESS;
    }
    return plan->on_video(shared_video);
}
// Config reload hook for vhost dvr_apply: re-evaluate the filter and,
// if the actived state flipped, restart the dvr (unpublish + republish).
int SrsDvr::on_reload_vhost_dvr_apply(string vhost)
{
    SrsConfDirective* conf = _srs_config->get_dvr_apply(req->vhost);
    bool now_actived = srs_config_apply_filter(conf, req);

    // unchanged: nothing to do.
    if (now_actived == actived) {
        return ERROR_SUCCESS;
    }

    // the apply changed, republish the dvr.
    actived = now_actived;
    on_unpublish();
    return on_publish(true);
}
#endif
| {'content_hash': '56fa0a3359ee60955091b8b578bdce89', 'timestamp': '', 'source': 'github', 'line_count': 1080, 'max_line_length': 107, 'avg_line_length': 23.558333333333334, 'alnum_prop': 0.5915969028809496, 'repo_name': 'myself659/simple-rtmp-server', 'id': '3b0971af8c825572c40d3cd3029cb902c31be072', 'size': '26543', 'binary': False, 'copies': '4', 'ref': 'refs/heads/develop', 'path': 'trunk/src/app/srs_app_dvr.cpp', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ActionScript', 'bytes': '89221'}, {'name': 'Assembly', 'bytes': '3935'}, {'name': 'C', 'bytes': '191615'}, {'name': 'C++', 'bytes': '3441502'}, {'name': 'CMake', 'bytes': '4269'}, {'name': 'HTML', 'bytes': '107145'}, {'name': 'JavaScript', 'bytes': '76794'}, {'name': 'Makefile', 'bytes': '10041'}, {'name': 'Python', 'bytes': '36962'}, {'name': 'QMake', 'bytes': '704'}, {'name': 'Shell', 'bytes': '172800'}]} |
from os import path
import unittest
from mock import patch
from prudentia.domain import Box
from prudentia.ssh import SshCli
class TestSshCli(unittest.TestCase):
    """Tests box registration and reconfiguration through the SSH CLI.

    Both tests drive the CLI by patching ``prudentia.utils.io.input_value``
    so each interactive prompt is answered from a canned dict; the previously
    duplicated box/answer plumbing now lives in two private helpers.
    """

    def setUp(self):
        self.tests_path = path.dirname(path.realpath(__file__))
        self.cli = SshCli()

    def _make_box(self, ip, user, pwd):
        # Build the box the CLI is expected to end up with for the
        # given prompt answers.
        return Box(
            name='ssh-test-box',
            playbook=path.join(self.tests_path, 'uname.yml'),
            hostname='localhost',
            ip=ip,
            remote_user=user,
            remote_pwd=pwd
        )

    def _run_with_answers(self, cli_action, box):
        # Run a CLI action while answering every input_value prompt from
        # the expected box's fields. args[0] is the prompt label.
        answers = {
            'playbook path': 'uname.yml',
            'box name': box.name,
            'instance address or inventory': box.ip,
            'remote user': box.remote_user,
            'password for the remote user': box.remote_pwd,
        }
        with patch('prudentia.utils.io.input_value') as iv:
            iv.side_effect = lambda *args, **kwargs: answers[args[0]]
            cli_action()

    def test_define_box(self):
        expected_box = self._make_box('1.2.3.4', 'nobody', '')
        self._run_with_answers(lambda: self.cli.do_register(None), expected_box)
        self.assertEqual(self.cli.provider.get_box(expected_box.name), expected_box)

    def test_redefine_box(self):
        expected_box = self._make_box('4.3.2.1', 'everybody', 'xxx')
        self._run_with_answers(
            lambda: self.cli.do_reconfigure(expected_box.name), expected_box)
        self.assertEqual(self.cli.provider.get_box(expected_box.name), expected_box)
| {'content_hash': '44311e27168bb0c3a79838ab9ad03fea', 'timestamp': '', 'source': 'github', 'line_count': 62, 'max_line_length': 88, 'avg_line_length': 33.96774193548387, 'alnum_prop': 0.5408357075023742, 'repo_name': 'StarterSquad/prudentia', 'id': '5bae83e5318472bc4d61a1a08999f1bb7e41129d', 'size': '2106', 'binary': False, 'copies': '1', 'ref': 'refs/heads/develop', 'path': 'tests/ssh_cli_test.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Python', 'bytes': '87577'}, {'name': 'Shell', 'bytes': '4010'}]} |
export default class UpdatingCollectionFactory {
    /**
     * A collection kept in sync with a remote resource over a WebSocket.
     *
     * The constructor returns a Proxy so instances read like arrays:
     * numeric indices and `length` delegate to the internal collection,
     * and method lookups return a wrapper that forwards its arguments
     * (collected into a single array — see push()).
     *
     * @param {string} resource host[:port]/path of the WebSocket endpoint,
     *                          connected via the ws:// scheme.
     */
    constructor(resource){
        let self = this;
        this.resource = resource;
        this.socket = new WebSocket(`ws://${resource}`);
        this.collection = [];
        // number of reconnect attempts made so far (capped at 1).
        this.retried = 0;
        this._setUpSocket = () => {
            this.socket.onmessage = (message) => {
                const body = JSON.parse(message.data);
                console.log(body);
                // SELECT replaces the whole collection; INSERT appends.
                // (Was `this.collection = this.collection = body.SELECT` —
                // a redundant double assignment — and ternaries used as
                // statements; same behavior, expressed as plain ifs.)
                if (body.SELECT) {
                    this.collection = body.SELECT;
                }
                if (body.INSERT) {
                    this.collection = this.collection.concat(body.INSERT);
                }
            }
            this.socket.onopen = (evt) => {
                // request the current server-side state on connect.
                this.socket.send(JSON.stringify({method: "GET"}));
            }
            this.socket.onerror = (err) => {
                console.log(err.type);
            }
            this.socket.onclose = (evt) => {
                console.log("that dang nginx downgraded the connection again.");
                self._attemptReconnect();
            }
        }
        // Re-open the socket once; give up after the first retry.
        this._attemptReconnect = () => {
            console.log('attempt reconnect');
            console.log(this.resource);
            if(self.retried < 1){
                self.socket = new WebSocket(`ws://${this.resource}`);
                self._setUpSocket();
                self.retried += 1;
            } else {
                console.log("Attempted reconnect failed!");
            }
        }
        this._setUpSocket();
        // Array-like facade over the live collection.
        return new Proxy(this, {
            get: function(target, inx){
                if(typeof target[inx] === "function"){
                    // forward method calls; the rest-args wrapper hands the
                    // method its arguments as one array.
                    return (...args)=>{
                        return target[inx](args);
                    }
                } else if(!isNaN(parseInt(inx))){
                    return target.collection[parseInt(inx)]
                } else if(inx === 'length'){
                    return target.collection.length
                }
            }
        })
    }
    /**
     * Send one element to the server as a POST message.
     * Receives the proxied call's arguments as an array, hence element[0].
     */
    push(element){
        this.socket.send(JSON.stringify({method: "POST", body: element[0]}));
    }
}
| {'content_hash': '8e03fe6177c81f6efa9bb60349f87107', 'timestamp': '', 'source': 'github', 'line_count': 56, 'max_line_length': 100, 'avg_line_length': 48.107142857142854, 'alnum_prop': 0.3585746102449889, 'repo_name': 'PWAckerman/wrest', 'id': 'dd38ca9e25a433be3bfffaa035ed866678744cd9', 'size': '2694', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'client/UpdatingCollectionFactory.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '26026'}, {'name': 'Shell', 'bytes': '1348'}]} |
package org.hisp.dhis.android.app.views.dashboard.navigation.widget;
import org.hisp.dhis.client.sdk.ui.bindings.presenters.Presenter;
/**
* Created by thomaslindsjorn on 19/10/16.
*/
/**
 * Presenter contract for the tracked-entity-instance dashboard widgets.
 */
public interface TeiWidgetPresenter extends Presenter {
    // Render the widgets for the enrollment identified by the given uid.
    void drawWidgets(String enrollmentUid);
}
| {'content_hash': '2a9b69d9b1f21ff36e772a08b1e5e0c5', 'timestamp': '', 'source': 'github', 'line_count': 12, 'max_line_length': 68, 'avg_line_length': 24.333333333333332, 'alnum_prop': 0.7808219178082192, 'repo_name': 'erlingfjelstad/dhis2-android-data-collector', 'id': 'de236148552c4115bc7e037841c2982e76180736', 'size': '292', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'app/src/main/java/org/hisp/dhis/android/app/views/dashboard/navigation/widget/TeiWidgetPresenter.java', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Java', 'bytes': '420429'}]} |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.