Dataset schema (one row per source file; ⌀ marks nullable columns):

| column | type | range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 .. 1.05M |
| ext | string | 163 classes |
| lang | string | 53 classes |
| max_stars_repo_path | string | length 3 .. 945 |
| max_stars_repo_name | string | length 4 .. 112 |
| max_stars_repo_head_hexsha | string | length 40 .. 78 |
| max_stars_repo_licenses | list | length 1 .. 10 |
| max_stars_count | float64 ⌀ | 1 .. 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24 |
| max_issues_repo_path | string | length 3 .. 945 |
| max_issues_repo_name | string | length 4 .. 113 |
| max_issues_repo_head_hexsha | string | length 40 .. 78 |
| max_issues_repo_licenses | list | length 1 .. 10 |
| max_issues_count | float64 ⌀ | 1 .. 116k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24 |
| max_forks_repo_path | string | length 3 .. 945 |
| max_forks_repo_name | string | length 4 .. 113 |
| max_forks_repo_head_hexsha | string | length 40 .. 78 |
| max_forks_repo_licenses | list | length 1 .. 10 |
| max_forks_count | float64 ⌀ | 1 .. 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24 |
| content | string | length 3 .. 1.05M |
| avg_line_length | float64 | 1 .. 966k |
| max_line_length | int64 | 1 .. 977k |
| alphanum_fraction | float64 | 0 .. 1 |
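The three trailing columns are derived from content. Below is a minimal Perl sketch of one way to compute them; treating alphanum_fraction as the share of ASCII-alphanumeric characters and avg_line_length as total length divided by line count is an assumption here, not the dataset's documented definition:

    use strict;
    use warnings;

    # Derive the per-file statistics from a content string.
    sub content_stats {
        my ($content) = @_;
        my @lines = split /\n/, $content;
        my $max = 0;
        for my $line (@lines) {
            $max = length $line if length $line > $max;
        }
        # Count regex matches in list context, then take the count.
        my $alnum = () = $content =~ /[0-9A-Za-z]/g;
        return {
            avg_line_length   => @lines ? length($content) / @lines : 0,
            max_line_length   => $max,
            alphanum_fraction => length($content) ? $alnum / length($content) : 0,
        };
    }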
hexsha: ed55e1c1344f0c93366cfe52e8b5452691a6b8cb | size: 734 | ext: t | lang: Perl
max_stars:  t/zip-x.t | damil/App-Zip-Extract @ b5a93f05cc424d013908ae1e54a6455c07f1b5ce | ["ClArtistic"] | count: null | events: null .. null
max_issues: t/zip-x.t | damil/App-Zip-Extract @ b5a93f05cc424d013908ae1e54a6455c07f1b5ce | ["ClArtistic"] | count: null | events: null .. null
max_forks:  t/zip-x.t | damil/App-Zip-Extract @ b5a93f05cc424d013908ae1e54a6455c07f1b5ce | ["ClArtistic"] | count: null | events: null .. null
content:
use strict;
use warnings;
use Test::More;
use lib "../lib";
use App::Zip::X;
diag( "Testing App::Zip::X $App::Zip::X::VERSION, Perl $], $^X" );
# path to the docx file
(my $docx = $0) =~ s[zip-x\.t$][etc/zip-x.docx];
# run, capturing STDOUT
close STDOUT;
open STDOUT, ">", \my $capture_stdout
or die "could not redirect STDOUT: $!" ;
App::Zip::X->run("-xml", -archive => $docx, -member => "word/document.xml");
# just 3 very simple tests
like $capture_stdout, qr/^<\?xml/, "is XML content";
like $capture_stdout, qr/^ <w:body>/m, "is indented";
like $capture_stdout, qr/^ <w:p/m, "is really indented";
# that's it
done_testing();
# TODO : should also test the "-zip" feature (replacing a member into an archive)
avg_line_length: 23.677419 | max_line_length: 81 | alphanum_fraction: 0.632153
hexsha: ed6c7f4b797fa1a0e7895485ce3fbed35cd3899b | size: 16,223 | ext: pm | lang: Perl
max_stars:  perl/local/lib/perl5/x86_64-linux/Text/Xslate/PP/Opcode.pm | MasamiMishima/isucon3 @ 3d874a1d330212131289da0991cc648898d9d61f | ["MIT"] | count: null | events: null .. null
max_issues: perl/local/lib/perl5/x86_64-linux/Text/Xslate/PP/Opcode.pm | MasamiMishima/isucon3 @ 3d874a1d330212131289da0991cc648898d9d61f | ["MIT"] | count: 1 | events: 2019-03-10T05:33:03.000Z .. 2019-03-10T09:26:46.000Z
max_forks:  perl/local/lib/perl5/x86_64-linux/Text/Xslate/PP/Opcode.pm | takashabe/isucon3-qual @ 9b35f0299b76f38960d3594fd05c8467dab8dbe9 | ["MIT"] | count: null | events: null .. null
content:
package Text::Xslate::PP::Opcode;
use Mouse;
extends qw(Text::Xslate::PP::State);
our $VERSION = '2.0009';
use Carp ();
use Scalar::Util ();
use Text::Xslate::PP;
use Text::Xslate::PP::Const;
use Text::Xslate::PP::Method;
use Text::Xslate::Util qw(
p neat
mark_raw unmark_raw html_escape uri_escape
$DEBUG
);
use constant _DUMP_PP => scalar($DEBUG =~ /\b dump=pp \b/xms);
no warnings 'recursion';
if(!Text::Xslate::PP::_PP_ERROR_VERBOSE()) {
our @CARP_NOT = qw(
Text::Xslate
);
}
our $_current_frame;
#
#
#
sub op_noop {
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_move_to_sb {
$_[0]->{sb} = $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_move_from_sb {
$_[0]->{sa} = $_[0]->{sb};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_save_to_lvar {
tx_access_lvar( $_[0], $_[0]->op_arg, $_[0]->{sa} );
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_load_lvar {
$_[0]->{sa} = tx_access_lvar( $_[0], $_[0]->op_arg );
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_load_lvar_to_sb {
$_[0]->{sb} = tx_access_lvar( $_[0], $_[0]->op_arg );
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_localize_s {
my($st) = @_;
my $key = $st->op_arg;
my $newval = $st->{sa};
$st->localize($key, $newval);
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_localize_vars {
my($st) = @_;
my $new_vars = $st->{sa};
my $old_vars = $st->vars;
if(ref($new_vars) ne 'HASH') {
$st->warn(undef, "Variable map must be a HASH reference");
}
push @{ $st->{local_stack} }, bless sub {
$st->vars($old_vars);
return;
}, 'Text::Xslate::PP::Guard';
$st->vars($new_vars);
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_push {
push @{ $_[0]->{ SP }->[ -1 ] }, $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_pushmark {
push @{ $_[0]->{ SP } }, [];
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_nil {
$_[0]->{sa} = undef;
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_vars {
$_[0]->{sa} = $_[0]->{vars};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_literal {
$_[0]->{sa} = $_[0]->op_arg;
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_literal_i {
$_[0]->{sa} = $_[0]->op_arg;
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_fetch_s {
$_[0]->{sa} = $_[0]->{vars}->{ $_[0]->op_arg };
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_fetch_field {
my($st) = @_;
my $var = $st->{sb};
my $key = $st->{sa};
$st->{sa} = $st->fetch($var, $key);
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_fetch_field_s {
my($st) = @_;
my $var = $st->{sa};
my $key = $st->op_arg;
$st->{sa} = $st->fetch($var, $key);
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_print {
my($st) = @_;
$st->print($st->{sa});
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_print_raw {
my($st) = @_;
if(defined $st->{sa}) {
$st->{ output } .= $st->{sa};
}
else {
$st->warn( undef, "Use of nil to print" );
}
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_print_raw_s {
$_[0]->{ output } .= $_[0]->op_arg;
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_include {
my($st) = @_;
my $child = Text::Xslate::PP::tx_load_template( $st->engine, $st->{sa}, 1 );
$st->push_frame('include', undef);
my $output = Text::Xslate::PP::tx_execute( $child, $st->{vars} );
$st->pop_frame(0);
$st->{output} .= $output;
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_find_file {
$_[0]->{sa} = eval { $_[0]->engine->find_file($_[0]->{sa}); 1 };
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_suffix {
$_[0]->{sa} = $_[0]->engine->{suffix};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_for_start {
my($st) = @_;
my $id = $st->op_arg;
my $ar = Text::Xslate::PP::tx_check_itr_ar($st, $st->{sa});
#tx_access_lvar( $st, $id + TXfor_ITEM, undef );
tx_access_lvar( $st, $id + TXfor_ITER, -1 );
tx_access_lvar( $st, $id + TXfor_ARRAY, $ar );
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_for_iter {
my($st) = @_;
my $id = $st->{sa};
my $av = tx_access_lvar( $st, $id + TXfor_ARRAY );
if(defined $av) {
my $i = tx_access_lvar( $st, $id + TXfor_ITER );
$av = [ $av ] unless ref $av;
if ( ++$i < scalar(@{ $av }) ) {
tx_access_lvar( $st, $id + TXfor_ITEM, $av->[ $i ] );
tx_access_lvar( $st, $id + TXfor_ITER, $i );
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
else {
# finish the loop
$st->{sa} = ( $i > 0 ); # for 'for-else' block
tx_access_lvar( $st, $id + TXfor_ITEM, undef );
tx_access_lvar( $st, $id + TXfor_ITER, undef );
tx_access_lvar( $st, $id + TXfor_ARRAY, undef );
}
}
# finish
$st->{ pc } = $st->op_arg;
goto $st->{ code }->[ $st->{ pc } ]->{ exec_code };
}
sub op_add {
$_[0]->{sa} = $_[0]->{sb} + $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_sub {
$_[0]->{sa} = $_[0]->{sb} - $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_mul {
$_[0]->{sa} = $_[0]->{sb} * $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_div {
$_[0]->{sa} = $_[0]->{sb} / $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_mod {
my($st) = @_;
my $lhs = int $st->{sb};
my $rhs = int $st->{sa};
if($rhs == 0) {
$st->error(undef, "Illegal modulus zero");
$st->{sa} = 'NaN';
}
else {
$st->{sa} = $lhs % $rhs;
}
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_concat {
my($st) = @_;
$st->{sa} = Text::Xslate::PP::tx_concat($st->{sb}, $st->{sa});
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_repeat {
my($st) = @_;
$st->{sa} = Text::Xslate::PP::tx_repeat($st->{sb}, $st->{sa});
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_bitor {
$_[0]->{sa} = int($_[0]->{sb}) | int($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_bitand {
$_[0]->{sa} = int($_[0]->{sb}) & int($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_bitxor {
$_[0]->{sa} = int($_[0]->{sb}) ^ int($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_bitneg {
$_[0]->{sa} = ~int($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_and {
if ( $_[0]->{sa} ) {
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
else {
$_[0]->{ pc } = $_[0]->op_arg;
goto $_[0]->{ code }->[ $_[0]->{ pc } ]->{ exec_code };
}
}
sub op_dand {
if ( defined $_[0]->{sa} ) {
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
else {
$_[0]->{ pc } = $_[0]->op_arg;
goto $_[0]->{ code }->[ $_[0]->{ pc } ]->{ exec_code };
}
}
sub op_or {
if ( ! $_[0]->{sa} ) {
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
else {
$_[0]->{ pc } = $_[0]->op_arg;
goto $_[0]->{ code }->[ $_[0]->{ pc } ]->{ exec_code };
}
}
sub op_dor {
my $sv = $_[0]->{sa};
if ( defined $sv ) {
$_[0]->{ pc } = $_[0]->op_arg;
goto $_[0]->{ code }->[ $_[0]->{ pc } ]->{ exec_code };
}
else {
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
}
sub op_not {
$_[0]->{sa} = ! $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_minus {
$_[0]->{sa} = -$_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_max_index {
$_[0]->{sa} = scalar(@{ $_[0]->{sa} }) - 1;
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_builtin_mark_raw {
$_[0]->{sa} = mark_raw($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_builtin_unmark_raw {
$_[0]->{sa} = unmark_raw($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_builtin_html_escape {
$_[0]->{sa} = html_escape($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_builtin_uri_escape {
$_[0]->{sa} = uri_escape($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_builtin_is_array_ref {
$_[0]->{sa} = Text::Xslate::Util::is_array_ref($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_builtin_is_hash_ref {
$_[0]->{sa} = Text::Xslate::Util::is_hash_ref($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_is_code_ref {
$_[0]->{sa} = Text::Xslate::Util::is_code_ref($_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_match {
$_[0]->{sa} = Text::Xslate::PP::tx_match($_[0]->{sb}, $_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_eq {
$_[0]->{sa} = Text::Xslate::PP::tx_sv_eq($_[0]->{sb}, $_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_ne {
$_[0]->{sa} = !Text::Xslate::PP::tx_sv_eq($_[0]->{sb}, $_[0]->{sa});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_lt {
$_[0]->{sa} = $_[0]->{sb} < $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_le {
$_[0]->{sa} = $_[0]->{sb} <= $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_gt {
$_[0]->{sa} = $_[0]->{sb} > $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_ge {
$_[0]->{sa} = $_[0]->{sb} >= $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_ncmp {
$_[0]->{sa} = $_[0]->{sb} <=> $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_scmp {
$_[0]->{sa} = $_[0]->{sb} cmp $_[0]->{sa};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_range {
my($self) = @_;
push @{ $self->{ SP }->[ -1 ] }, ($self->{sb} .. $self->{sa});
goto $self->{ code }->[ ++$self->{ pc } ]->{ exec_code };
}
sub op_fetch_symbol {
my($st) = @_;
my $name = $st->op_arg;
$st->{sa} = $st->fetch_symbol($name);
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub tx_macro_enter {
my($st, $macro, $retaddr) = @_;
my $name = $macro->name;
my $addr = $macro->addr;
my $nargs = $macro->nargs;
my $outer = $macro->outer;
my $args = pop @{ $st->{SP} };
print STDERR " " x $st->current_frame, "tx_macro_enter($name) to $retaddr\n" if _DUMP_PP;
if(@{$args} != $nargs) {
$st->error(undef, "Wrong number of arguments for %s (%d %s %d)",
$name, scalar(@{$args}), scalar(@{$args}) > $nargs ? '>' : '<', $nargs);
$st->{ sa } = undef;
$st->{ pc }++;
return;
}
my $cframe = $st->push_frame($name, $retaddr);
$cframe->[ TXframe_OUTPUT ] = $st->{ output };
$st->{ output } = '';
my $i = 0;
if($outer > 0) {
# copies lexical variables from the old frame to the new one
my $oframe = $st->frame->[ $st->current_frame - 1 ];
for(; $i < $outer; $i++) {
my $real_ix = $i + TXframe_START_LVAR;
$cframe->[$real_ix] = $oframe->[$real_ix];
}
}
for my $val (@{$args}) {
tx_access_lvar( $st, $i++, $val );
}
$st->{ pc } = $addr;
if($st->{code}->[$addr]->{opname} ne 'macro_begin') {
Carp::croak("Oops: entering non-macros: ", p($st->{code}->[$addr]));
}
return;
}
sub op_macro_end {
my($st) = @_;
my $top = $st->frame->[ $st->current_frame ];
printf STDERR "%stx_macro_end(%s)]\n", ' ' x $st->current_frame - 1, $top->[ TXframe_NAME ] if _DUMP_PP;
$st->{sa} = mark_raw( $st->{ output } );
$st->pop_frame(1);
$st->{ pc } = $top->[ TXframe_RETADDR ];
goto $st->{ code }->[ $st->{ pc } ]->{ exec_code };
}
sub op_funcall {
my($st) = @_;
my $func = $st->{sa};
if(ref $func eq TXt_MACRO) {
tx_macro_enter($st, $func, $st->{ pc } + 1);
goto $st->{ code }->[ $st->{ pc } ]->{ exec_code };
}
else {
$st->{sa} = tx_funcall( $st, $func );
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
}
sub op_methodcall_s {
my($st) = @_;
$st->{sa} = Text::Xslate::PP::Method::tx_methodcall(
$st, undef, $st->op_arg, @{ pop @{ $st->{SP} } });
goto $st->{ code }->[ ++$st->{ pc } ]->{ exec_code };
}
sub op_make_array {
my $args = pop @{ $_[0]->{SP} };
$_[0]->{sa} = $args;
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_make_hash {
my $args = pop @{ $_[0]->{SP} };
$_[0]->{sa} = { @{$args} };
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_merge_hash {
$_[0]->{sa} = Text::Xslate::Util::merge_hash($_[0]->{sa}, $_[0]->{sb});
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_enter {
push @{$_[0]->{save_local_stack} ||= []}, delete $_[0]->{local_stack};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_leave {
$_[0]->{local_stack} = pop @{$_[0]->{save_local_stack}};
goto $_[0]->{ code }->[ ++$_[0]->{ pc } ]->{ exec_code };
}
sub op_goto {
$_[0]->{ pc } = $_[0]->op_arg;
goto $_[0]->{ code }->[ $_[0]->{ pc } ]->{ exec_code };
}
sub op_end {
my($st) = @_;
printf STDERR "op_end at %d\n", $st->{pc} if _DUMP_PP;
$st->{ pc } = $st->code_len;
if($st->current_frame != 0) {
#Carp::croak("Oops: broken stack frame:" . p($st->frame));
}
return;
}
sub op_depend; *op_depend = \&op_noop;
sub op_macro_begin; *op_macro_begin = \&op_noop;
sub op_macro_nargs; *op_macro_nargs = \&op_noop;
sub op_macro_outer; *op_macro_outer = \&op_noop;
sub op_set_opinfo; *op_set_opinfo = \&op_noop;
sub op_super; *op_super = \&op_noop;
#
# INTERNAL COMMON FUNCTIONS
#
sub tx_access_lvar {
return $_[0]->pad->[ $_[1] + TXframe_START_LVAR ] if @_ == 2;
$_[0]->pad->[ $_[1] + TXframe_START_LVAR ] = $_[2];
}
sub tx_funcall {
my ( $st, $proc ) = @_;
my ( @args ) = @{ pop @{ $st->{ SP } } };
my $ret;
if(!defined $proc) {
my $c = $st->{code}->[ $st->{pc} - 1 ];
$st->error( undef, "Undefined function%s is called",
$c->{ opname } eq 'fetch_s' ? " $c->{arg}()" : ""
);
}
else {
$ret = eval { $proc->( @args ) };
$st->error( undef, "%s", $@) if $@;
}
return $ret;
}
sub proccall {
my($st, $proc) = @_;
if(ref $proc eq TXt_MACRO) {
local $st->{pc} = $st->{pc};
tx_macro_enter($st, $proc, $st->{code_len});
$st->{code}->[ $st->{pc} ]->{ exec_code }->( $st );
return $st->{sa};
}
else {
return tx_funcall($st, $proc);
}
}
no Mouse;
__PACKAGE__->meta->make_immutable();
__END__
=head1 NAME
Text::Xslate::PP::Opcode - Text::Xslate opcode implementation in pure Perl
=head1 DESCRIPTION
This module is a pure Perl implementation of the Xslate opcodes.
This backend is enabled with C<< $ENV{XSLATE}='pp=opcode' >>.
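A minimal usage sketch, assuming the standard Text::Xslate constructor and C<render_string> API; the environment variable must be set before Text::Xslate is loaded:

    BEGIN { $ENV{XSLATE} = 'pp=opcode' }   # select the pure-Perl opcode backend
    use Text::Xslate;
    my $tx = Text::Xslate->new();
    print $tx->render_string('Hello, <: $name :>!', { name => 'Xslate' });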
=head1 SEE ALSO
L<Text::Xslate>
L<Text::Xslate::PP>
=head1 AUTHOR
Makamaka Hannyaharamitu E<lt>makamaka at cpan.orgE<gt>
Text::Xslate was written by Fuji, Goro (gfx).
=head1 LICENSE AND COPYRIGHT
Copyright (c) 2010 by Makamaka Hannyaharamitu (makamaka).
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
avg_line_length: 24.141369 | max_line_length: 108 | alphanum_fraction: 0.458916
hexsha: 73edeb04f3359a59a12f5ac3bd53a35aadefd26f | size: 5,163 | ext: pm | lang: Perl
max_stars:  lib/Bio/EnsEMBL/DataCheck/Pipeline/DataCheckSummary.pm | danstaines/ensembl-datacheck @ eea8117dec950a627d7ebb1adba95d125e245e87 | ["Apache-2.0"] | count: null | events: null .. null
max_issues: lib/Bio/EnsEMBL/DataCheck/Pipeline/DataCheckSummary.pm | danstaines/ensembl-datacheck @ eea8117dec950a627d7ebb1adba95d125e245e87 | ["Apache-2.0"] | count: null | events: null .. null
max_forks:  lib/Bio/EnsEMBL/DataCheck/Pipeline/DataCheckSummary.pm | danstaines/ensembl-datacheck @ eea8117dec950a627d7ebb1adba95d125e245e87 | ["Apache-2.0"] | count: null | events: null .. null
content:
=head1 LICENSE
Copyright [2018-2022] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=head1 NAME
Bio::EnsEMBL::DataCheck::Pipeline::DataCheckSummary
=head1 DESCRIPTION
Store summary of datacheck results, and optionally send it via email.
=cut
package Bio::EnsEMBL::DataCheck::Pipeline::DataCheckSummary;
use strict;
use warnings;
use feature 'say';
use JSON;
use Time::Piece;
use base ('Bio::EnsEMBL::Hive::RunnableDB::NotifyByEmail');
sub run {
my $self = shift;
my $submission_job_id = $self->param('submission_job_id');
my $history_file = $self->param('history_file');
my $output_dir = $self->param('output_dir');
my $json_output_file = $self->param('json_output_file');
my $json_passed = $self->param('json_passed');
my $tag = $self->param('tag');
my $email = $self->param('email');
my $timestamp = $self->param('timestamp');
my $end_timestamp = localtime->cdate;
my $start = Time::Piece->strptime($timestamp,'%a %b %d %H:%M:%S %Y');
my $end = Time::Piece->strptime($end_timestamp,'%a %b %d %H:%M:%S %Y');
my $runtime_sec = $end - $start;
my $sql = q/
SELECT dbname, passed, failed, skipped FROM datacheck_results
WHERE submission_job_id = ?
ORDER BY dbname
/;
my $sth = $self->dbc->prepare($sql);
$sth->execute($submission_job_id);
my ($passed_total, $failed_total) = (0, 0);
my %results;
my $results = $sth->fetchall_arrayref();
foreach my $result (@$results) {
my ($dbname, $passed, $failed, $skipped) = @$result;
$failed ? $failed_total++ : $passed_total++;
$results{$dbname}{passed} = $passed;
$results{$dbname}{failed} = $failed;
$results{$dbname}{skipped} = $skipped;
}
my %output = (
databases => \%results,
passed_total => $passed_total,
failed_total => $failed_total,
history_file => $history_file,
output_dir => $output_dir,
json_output_file => $json_output_file,
json_passed => $json_passed,
tag => $tag,
timestamp => $end_timestamp,
runtime_sec => "$runtime_sec",
);
$self->param('output', \%output);
if (defined $email) {
$self->set_email_parameters();
$self->SUPER::run();
}
}
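# A self-contained sketch (with placeholder timestamps) of the Time::Piece
# idiom used in run() above: subtracting two Time::Piece objects yields a
# Time::Seconds object, which stringifies to the elapsed seconds.
#
#   use Time::Piece;
#   my $fmt   = '%a %b %d %H:%M:%S %Y';
#   my $start = Time::Piece->strptime('Mon Jan 20 10:00:00 2025', $fmt);
#   my $end   = Time::Piece->strptime('Mon Jan 20 10:05:30 2025', $fmt);
#   print $end - $start, "\n";   # prints 330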
sub write_output {
my $self = shift;
my $output = {
job_id => $self->param('submission_job_id'),
output => JSON->new->pretty->encode($self->param('output')),
};
$self->dataflow_output_id($output, 1);
}
sub set_email_parameters {
my $self = shift;
my %output = %{ $self->param('output') };
my $db_text;
foreach my $dbname (sort keys %{$output{databases}}) {
$db_text .= "\tpassed: " . $output{databases}{$dbname}{passed};
$db_text .= "\tfailed: " . $output{databases}{$dbname}{failed};
$db_text .= "\tskipped: " . $output{databases}{$dbname}{skipped};
$db_text .= "\t$dbname\n";
}
my $subject;
if ($output{failed_total}) {
$subject = "FAIL: Datacheck Summary";
} else {
$subject = "PASS: Datacheck Summary";
}
my $passed_db = $output{passed_total} == 1 ? 'database' : 'databases';
my $failed_db = $output{failed_total} == 1 ? 'database' : 'databases';
my $text = "All datachecks have completed.\n".
$output{passed_total} . " $passed_db passed all datachecks, ".
$output{failed_total} . " $failed_db failed one or more datachecks.\n";
my $tag = $output{tag};
if (defined $tag) {
$subject .= " ($tag)";
$text .= "Submission tag: $tag\n";
}
$self->param('subject', $subject);
$text .= "Details:\n$db_text";
my $history_file = $output{history_file};
if (defined $history_file) {
$text .= "The datacheck results were stored in a history file: $history_file\n";
} else {
$text .= "The datacheck results were not stored in a history file.\n";
}
my $output_dir = $output{output_dir};
if (defined $output_dir) {
$text .= "The full output of the datachecks were stored in: $output_dir\n";
} else {
$text .= "The full output of the datachecks were not stored.\n";
}
my $json_output_file = $output{json_output_file};
if (defined $json_output_file) {
if ($output{json_passed}) {
$text .= "All results were stored in JSON format: $json_output_file\n";
} else {
$text .= "Failures were stored in JSON format: $json_output_file\n";
}
if (-s $json_output_file < 2e6) {
push @{$self->param('attachments')}, $json_output_file;
} else {
$text .= "(JSON file not attached because it exceeds 2MB limit)";
}
} else {
$text .= "The results were not stored in JSON format.\n";
}
$self->param('text', $text);
}
1;
avg_line_length: 29.169492 | max_line_length: 84 | alphanum_fraction: 0.638389
hexsha: ed79639de902c4ea530a6164361a8462411a545f | size: 4,837 | ext: t | lang: Perl
max_stars:  t/author-locale-yue.t | kentfredric/t-DTFST @ 77e85bf86fa608312d85f413a6babe008a8cabc8 | ["Artistic-2.0"] | count: 1 | events: 2020-01-21T11:29:08.000Z .. 2020-01-21T11:29:08.000Z
max_issues: t/author-locale-yue.t | kentfredric/t-DTFST @ 77e85bf86fa608312d85f413a6babe008a8cabc8 | ["Artistic-2.0"] | count: null | events: null .. null
max_forks:  t/author-locale-yue.t | kentfredric/t-DTFST @ 77e85bf86fa608312d85f413a6babe008a8cabc8 | ["Artistic-2.0"] | count: null | events: null .. null
content:
BEGIN {
unless ($ENV{AUTHOR_TESTING}) {
require Test::More;
Test::More::plan(skip_all => 'these tests are for testing by the author');
}
}
use strict;
use warnings;
use Test::More 0.96;
use Test::Fatal;
use DateTime::Format::Strptime;
use DateTime::Locale;
use DateTime;
my $code_meth = DateTime::Locale->load('en')->can('code') ? 'code' : 'id';
my $locale = 'yue';
test_days($locale);
test_months($locale);
test_am_pm($locale);
test_locale($locale);
done_testing();
sub test_days {
my $locale = shift;
subtest(
'days',
sub {
foreach my $day ( 1 .. 7 ) {
subtest(
"Day $day",
sub { _test_one_day( $locale, $day ); },
);
}
}
);
}
sub _test_one_day {
my $locale = shift;
my $day = shift;
_utf8_output();
my $pattern = '%Y-%m-%d %A';
my $dt = DateTime->now( locale => $locale )->set( day => $day );
my $input = $dt->strftime($pattern);
my $strptime;
is(
exception {
$strptime = DateTime::Format::Strptime->new(
pattern => $pattern,
locale => $locale,
on_error => 'croak',
);
},
undef,
'constructor with day name in pattern (%A)'
) or return;
my $parsed_dt;
is(
exception {
$parsed_dt = $strptime->parse_datetime($input)
},
undef,
"parsed $input"
) or return;
is(
$parsed_dt->strftime($pattern),
$input,
'strftime output matches input'
);
}
sub test_months {
my $locale = shift;
subtest(
'months',
sub {
foreach my $month ( 1 .. 12 ) {
subtest(
"Month $month",
sub { _test_one_month( $locale, $month ) },
);
}
}
);
}
sub _test_one_month {
my $locale = shift;
my $month = shift;
_utf8_output();
my $pattern = '%Y-%m-%d %B';
my $dt
= DateTime->now( locale => $locale )->truncate( to => 'month' )
->set( month => $month );
my $input = $dt->strftime($pattern);
my $strptime;
is(
exception {
$strptime = DateTime::Format::Strptime->new(
pattern => $pattern,
locale => $locale,
on_error => 'croak',
);
},
undef,
'constructor with month name (%B)'
) or return;
my $parsed_dt;
is(
exception {
$parsed_dt = $strptime->parse_datetime($input)
},
undef,
"parsed $input"
) or return;
is(
$parsed_dt->strftime($pattern),
$input,
'strftime output matches input'
);
}
sub test_am_pm {
my $locale = shift;
subtest(
'am/pm',
sub {
foreach my $hour ( 0, 11, 12, 23 ) {
subtest(
"Hour $hour",
sub { _test_one_hour( $locale, $hour ); },
);
}
}
);
}
sub _test_one_hour {
my $locale = shift;
my $hour = shift;
_utf8_output();
my $pattern = '%Y-%m-%d %H:%M %p';
my $dt = DateTime->now( locale => $locale )->set( hour => $hour );
my $input = $dt->strftime($pattern);
my $strptime;
is(
exception {
$strptime = DateTime::Format::Strptime->new(
pattern => $pattern,
locale => $locale,
on_error => 'croak',
);
},
undef,
'constructor with meridian (%p)'
) or return;
my $parsed_dt;
is(
exception {
$parsed_dt = $strptime->parse_datetime($input)
},
undef,
"parsed $input",
) or return;
is(
$parsed_dt->strftime($pattern),
$input,
'strftime output matches input'
);
}
sub test_locale {
my $locale = shift;
my $strptime;
is(
exception {
$strptime = DateTime::Format::Strptime->new(
pattern => '%Y-%m-%d',
locale => $locale,
on_error => 'croak',
);
},
undef,
'constructor with locale'
) or return;
my $input = '2015-01-30';
my $parsed_dt;
is(
exception {
$parsed_dt = $strptime->parse_datetime($input)
},
undef,
"parsed $input",
) or return;
is(
$parsed_dt->locale->$code_meth,
$locale,
"code of locale for DateTime returned by parser is $locale"
);
}
sub _utf8_output {
binmode $_, ':encoding(UTF-8)'
for map { Test::Builder->new->$_ }
qw( output failure_output todo_output );
}
avg_line_length: 20.582979 | max_line_length: 78 | alphanum_fraction: 0.467025
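Each subtest in the test file above relies on the same round-trip idiom: format a known DateTime with a pattern, then check that parsing the output reproduces it. Stripped to its core (the locale and pattern here are illustrative):

    use DateTime;
    use DateTime::Format::Strptime;

    my $pattern = '%Y-%m-%d %H:%M';
    my $dt      = DateTime->now( locale => 'yue' );
    my $input   = $dt->strftime($pattern);
    my $strp    = DateTime::Format::Strptime->new(
        pattern  => $pattern,
        locale   => 'yue',
        on_error => 'croak',
    );
    # True if the formatted-then-parsed value matches the original output.
    my $round_trips = $strp->parse_datetime($input)->strftime($pattern) eq $input;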
hexsha: ed60437aebff7591aff7029f044a6068adbb3a9c | size: 2,561 | ext: t | lang: Perl
max_stars:  t/BloodDonor.t | anishk33/zeroclickinfo-goodies @ c9a7c926bd57e21eef419804e1bc868f35c8c7f8 | ["Apache-2.0"] | count: null | events: null .. null
max_issues: t/BloodDonor.t | anishk33/zeroclickinfo-goodies @ c9a7c926bd57e21eef419804e1bc868f35c8c7f8 | ["Apache-2.0"] | count: null | events: null .. null
max_forks:  t/BloodDonor.t | anishk33/zeroclickinfo-goodies @ c9a7c926bd57e21eef419804e1bc868f35c8c7f8 | ["Apache-2.0"] | count: null | events: null .. null
content:
#!/usr/bin/env perl
use strict;
use warnings;
use Test::More;
use DDG::Test::Goodie;
zci answer_type => "blood_donor";
zci is_cached => 1;
sub build_structure
{
my ($blood_type, $data, $keys) = @_;
return {
id => 'blood_donor',
name => 'Blood Donors',
description => 'Returns available donors for a blood type',
meta => {
sourceName => 'Wikipedia',
sourceUrl => 'https://en.wikipedia.org/wiki/Blood_type'
},
templates => {
group => 'list',
options => {
content => 'record'
}
},
data => {
title => "Donors for blood type $blood_type",
record_data => $data,
record_keys => $keys
}
};
}
ddg_goodie_test(
['DDG::Goodie::BloodDonor'],
'donor A+' => test_zci("Ideal donor: A+\nOther donors: A+ or O+\nOnly if no Rh(+) found: A- or O-",
structured_answer => build_structure("A+",{
"Ideal donor" => "A+",
"Other donors" => "A+ or O+",
"Only if no Rh(+) found" => "A- or O-"
},
["Ideal donor", "Other donors", "Only if no Rh(+) found"]
)
),
'donors for A+' => test_zci("Ideal donor: A+\nOther donors: A+ or O+\nOnly if no Rh(+) found: A- or O-",
structured_answer => build_structure("A+",{
"Ideal donor" => "A+",
"Other donors" => "A+ or O+",
"Only if no Rh(+) found" => "A- or O-"
},
["Ideal donor", "Other donors", "Only if no Rh(+) found"]
)
),
'blood donor A+' => test_zci("Ideal donor: A+\nOther donors: A+ or O+\nOnly if no Rh(+) found: A- or O-",
structured_answer => build_structure("A+",{
"Ideal donor" => "A+",
"Other donors" => "A+ or O+",
"Only if no Rh(+) found" => "A- or O-"
},
["Ideal donor", "Other donors", "Only if no Rh(+) found"]
)
),
'blood donors for A+' => test_zci("Ideal donor: A+\nOther donors: A+ or O+\nOnly if no Rh(+) found: A- or O-",
structured_answer => build_structure("A+",{
"Ideal donor" => "A+",
"Other donors" => "A+ or O+",
"Only if no Rh(+) found" => "A- or O-"
},
["Ideal donor", "Other donors", "Only if no Rh(+) found"]
)
),
'donor o+' => test_zci("Ideal donor: O+\nOther donors: O+\nOnly if no Rh(+) found: O-",
structured_answer => build_structure("O+",{
"Ideal donor" => "O+",
"Other donors" => "O+",
"Only if no Rh(+) found" => "O-"
},
["Ideal donor", "Other donors", "Only if no Rh(+) found"]
)
),
);
done_testing;
avg_line_length: 29.77907 | max_line_length: 114 | alphanum_fraction: 0.513081
hexsha: ed509f9275c041aa7e2011b21033a1adf8dad950 | size: 9,240 | ext: pl | lang: Perl
max_stars:  scripts/misc/generate_clin_significance_tables.pl | MatBarba/ensembl-variation @ 314c4e3f5708d9060605b7822b5cb8d4d867278c | ["Apache-2.0"] | count: null | events: null .. null
max_issues: scripts/misc/generate_clin_significance_tables.pl | MatBarba/ensembl-variation @ 314c4e3f5708d9060605b7822b5cb8d4d867278c | ["Apache-2.0"] | count: null | events: null .. null
max_forks:  scripts/misc/generate_clin_significance_tables.pl | MatBarba/ensembl-variation @ 314c4e3f5708d9060605b7822b5cb8d4d867278c | ["Apache-2.0"] | count: null | events: null .. null
content:
#!/usr/bin/env perl
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<helpdesk.org>.
=cut
# Script to generate the HTML tables that display clinical significance values
use strict;
use warnings;
use Bio::EnsEMBL::Registry;
use Getopt::Long;
my $registry = 'Bio::EnsEMBL::Registry';
# Print the usage instructions if run without parameters
usage() unless (scalar(@ARGV));
my ($species, $host, $db_version, $output_file, $help);
GetOptions(
'v=i' => \$db_version,
'o=s' => \$output_file,
'host=s' => \$host,
'species|s=s' => \$species,
'help!' => \$help
);
usage ("Species, host, version and output_file must be specified") unless ($species && $host && $db_version && $output_file);
# Load the registry from db
$registry->load_registry_from_db(
-host => $host,
-user => 'ensro',
-db_version => $db_version,
);
my $vdb = $registry->get_DBAdaptor($species,'variation');
my $dbVar = $vdb->dbc->db_handle;
my %info = (
'label' => ['ClinVar','DGVa'],
'clin_sign' => [
qq{ SELECT DISTINCT clinical_significance FROM variation WHERE variation_id NOT IN (SELECT variation_id FROM failed_variation) AND clinical_significance is not NULL},
qq{ SELECT DISTINCT clinical_significance FROM structural_variation
WHERE structural_variation_id NOT IN (SELECT structural_variation_id FROM failed_structural_variation)
AND clinical_significance is not NULL
}
],
'query' => [
qq{ SELECT name FROM variation
WHERE FIND_IN_SET(?,clinical_significance)
AND variation_id NOT IN (SELECT variation_id FROM failed_variation)
LIMIT 1
},
qq{ SELECT v1.variation_name FROM structural_variation v1, structural_variation v2, structural_variation_association vas
WHERE v1.is_evidence=0
AND FIND_IN_SET(?,v2.clinical_significance)
AND v2.structural_variation_id=vas.supporting_structural_variation_id
AND v2.is_evidence=1
AND v1.structural_variation_id=vas.structural_variation_id
AND v1.structural_variation_id NOT IN
(SELECT structural_variation_id FROM failed_structural_variation)
LIMIT 1
}
],
'link' => [ qq{/Homo_sapiens/Variation/Explore?v=},qq{/Homo_sapiens/StructuralVariation/Evidence?sv=}],
);
my %star_ranking = ( 'status' => { 'not classified by submitter' => { 'term' => 'no assertion', 'stars' => 0 },
'classified by single submitter' => { 'term' => 'single submitter', 'stars' => 1 },
'classified by multiple submitters' => { 'term' => 'multiple submitters', 'stars' => 2 },
'reviewed by expert panel' => { 'term' => 'reviewed by expert panel', 'stars' => 3 },
'practice guideline' => { 'term' => 'practice guideline', 'stars' => 4 }
},
'query' => [qq{ SELECT pf.object_id FROM phenotype_feature pf, phenotype_feature_attrib pfa, attrib_type a
WHERE pf.phenotype_feature_id=pfa.phenotype_feature_id
AND pfa.attrib_type_id=a.attrib_type_id
AND a.code='review_status'
AND pf.type='Variation'
AND pfa.value LIKE ?
}],
'link' => [qq{/Homo_sapiens/Variation/Phenotype?v=}]
);
my $html;
my $bg = '';
my $icon_path = '/i/val/clinsig_';
my $border_left = qq{ style="border-left:1px solid #BBB"};
# Clinical significance terms
my %clin_sign;
foreach my $type_stmt (@{$info{'clin_sign'}}) {
my $sth = $dbVar->prepare($type_stmt);
$sth->execute();
while (my ($vals) = $sth->fetchrow_array()){
foreach my $val (split(',',$vals)) {
$clin_sign{$val} = 1;
}
}
$sth->finish;
}
# Clinical significance examples
my $html_content = add_table_header($info{'label'});
my $count = 0;
my $cs_term_count = scalar (keys %clin_sign);
foreach my $cs_term (sort(keys %clin_sign)) {
$count ++;
my $icon_label = $cs_term;
$icon_label =~ s/ /-/g;
my $icon_col = qq{<td style="text-align:center"><img src="$icon_path$icon_label.png" title="$cs_term"/></td>};
my $examples;
for (my $i=0; $i < scalar(@{$info{'query'}});$i++) {
$examples .= get_variant_example($i,$cs_term,\%info);
}
$html_content .= qq{ <tr$bg>$icon_col<td>$cs_term</td>$examples</tr>\n};
$bg = set_bg();
print STDERR qq{Term "$cs_term" done ($count/$cs_term_count)\n};
}
# Four-star rating
my $html_star_content;
foreach my $review_status (sort {$star_ranking{'status'}{$a}{'stars'} <=> $star_ranking{'status'}{$b}{'stars'}} keys(%{$star_ranking{'status'}})) {
my $count_stars = $star_ranking{'status'}{$review_status}{'stars'};
my $search_term = $star_ranking{'status'}{$review_status}{'term'};
my $stars = qq{<span class="_ht" title="$review_status">};
for (my $i=1; $i<5; $i++) {
my $star_color = ($i <= $count_stars) ? 'gold' : 'grey';
$stars .= qq{<img style="vertical-align:top" src="/i/val/$star_color\_star.png" alt="$star_color"/>};
}
$stars .= qq{</span>};
my $star_example = get_variant_example(0,'%'.$search_term.'%',\%star_ranking);
$html_star_content .= qq{ <tr$bg>\n <td>$stars</td>\n <td>$review_status</td>\n $star_example\n </tr>};
$bg = set_bg();
}
## CONTENT ##
$html = qq{
<table class="ss" style="width:auto">
$html_content
</table>
<p>Further explanations about the clinical significance terms are available on the <a href="http://www.ncbi.nlm.nih.gov/clinvar/docs/clinsig/" target="_blank">ClinVar website</a>.</p>
<h3>ClinVar rating</h3>
<p>We use the <a href="http://www.ncbi.nlm.nih.gov/clinvar/docs/details/#interpretation" target="_blank">ClinVar "four-star" rating</a> system to indicate the quality of classification/validation of the variant:</p>
<table class="ss" style="width:auto">
<tr><th>Rating</th><th>Description</th><th$border_left>Example</th></tr>
$html_star_content
</table>
};
open OUT, "> $output_file" or die $!;
print OUT $html;
close(OUT);
#############
## METHODS ##
#############
sub set_bg {
return ($bg eq '') ? ' class="bg2"' : '';
}
sub execute_stmt_one_result {
my $stmt = shift;
my $value = shift;
my $sth = $dbVar->prepare($stmt);
$sth->execute($value);
return $sth->fetchrow_array;
}
sub get_variant_example {
my $order = shift;
my $value = shift;
my $data = shift;
my $var = (execute_stmt_one_result($data->{'query'}->[$order],$value))[0];
my $example = (defined($var)) ? sprintf (qq{<a href="%s%s">%s</a>},$data->{'link'}->[$order],$var,$var) : '-';
return qq{<td$border_left>$example</td>};
}
sub add_table_header {
my $labels = shift;
my $icon_column = qq{<th><span class="_ht ht" title="Icons designed by Ensembl">Icon</span></th>};
my $eg_columns;
foreach my $label (@$labels) {
$eg_columns .= qq{<th$border_left>$label example</th>};
}
return qq{ <tr>$icon_column<th>Value</th>$eg_columns</tr>\n};
}
sub usage {
my $msg = shift;
print qq{
$msg
Usage: perl generate_clin_significance_tables.pl [OPTION]
Update the clinical significance tables in "data_description.html" (under public-plugins/ensembl/htdocs/info/genome/variation/).
Options:
-help Print this message
-v Ensembl version, e.g. 65 (Required)
-o An HTML output file name (Required)
-host Host of the human database (Required)
-species Species name (Required)
} . "\n";
exit(0);
}
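# Example invocation (hypothetical host and output file names; read access via
# the 'ensro' user, as in the load_registry_from_db() call above, is assumed):
#   perl generate_clin_significance_tables.pl -v 87 -host my-ensembl-mirror \
#        -species homo_sapiens -o clin_significance_tables.html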
avg_line_length: 36.96 | max_line_length: 215 | alphanum_fraction: 0.583442
hexsha: ed05005a83b02d19d69317751d6be927aa37b0c4 | size: 2,672 | ext: pm | lang: Perl
max_stars:  modules/Bio/EnsEMBL/Analysis/Tools/Pmatch/ContigHit.pm | ens-bwalts/ensembl-analysis @ 0ce32243f3ce17aad133ee1f5016e20ddff545bd | ["Apache-2.0"] | count: null | events: null .. null
max_issues: modules/Bio/EnsEMBL/Analysis/Tools/Pmatch/ContigHit.pm | ens-bwalts/ensembl-analysis @ 0ce32243f3ce17aad133ee1f5016e20ddff545bd | ["Apache-2.0"] | count: null | events: null .. null
max_forks:  modules/Bio/EnsEMBL/Analysis/Tools/Pmatch/ContigHit.pm | ens-bwalts/ensembl-analysis @ 0ce32243f3ce17aad133ee1f5016e20ddff545bd | ["Apache-2.0"] | count: null | events: null .. null
content:
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# holds a pmatch contig hit - simply the name (identifier) of the contig
# and a list of start-end positions
package Bio::EnsEMBL::Analysis::Tools::Pmatch::ContigHit;
use warnings ;
use strict ;
use vars qw(@ISA);
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use Bio::EnsEMBL::Utils::Argument qw( rearrange );
@ISA = qw();
=head2 new
Title : new
Usage :
Function: constructor
Example :
Returns :
Args :
=cut
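# Construction sketch (hypothetical identifier; rearrange() maps -ID to ID):
#   my $hit = Bio::EnsEMBL::Analysis::Tools::Pmatch::ContigHit->new(-ID => 'contig_1');
#   $hit->add_CoordPair($coord_pair);   # a CoordPair with strand 1 or -1
#   my @forward_pairs = @{ $hit->each_ForwardPair() };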
sub new {
my ($class, @args) = @_;
my $self = bless {}, $class;
my ($id) = rearrange(['ID'], @args);
throw("No id") unless defined $id;
$self->id($id);
$self->{'_forward_pairs'} = [];
$self->{'_reverse_pairs'} = [];
return $self;
}
=head2 id
Title : id
Usage :
Function: get/set for contig id
Example :
Returns :
Args :
=cut
sub id {
my ($self,$id) = @_;
if ($id) {
$self->{'id'} = $id;
}
return $self->{'id'};
}
=head2 add_CoordPair
Title : add_CoordPair
Usage :
Function: adds a CoordPair to the list making up this hit
Example :
Returns :
Args :
=cut
sub add_CoordPair {
my ($self,$pair) = @_;
throw('No coord pair') unless defined $pair;
throw('$pair is not a Bio::EnsEMBL::Analysis::Tools::Pmatch::CoordPair') unless $pair->isa("Bio::EnsEMBL::Analysis::Tools::Pmatch::CoordPair");
if($pair->strand == 1) {
push(@{$self->{_forward_pairs}},$pair);
}
else {
push(@{$self->{_reverse_pairs}},$pair);
}
}
=head2 each_ForwardPair
Title : each_ForwardPair
Usage :
Function: returns CoordPairs representing hits between a protein and the forward strand of the contig
Example :
Returns :
Args :
=cut
sub each_ForwardPair {
my ($self) = @_;
return $self->{_forward_pairs};
}
=head2 each_ReversePair
Title : each_Reverseair
Usage :
Function: returns CoordPairs representing hits between a protein and the reverse strand of the contig
Example :
Returns :
Args :
=cut
sub each_ReversePair {
my ($self) = @_;
return $self->{_reverse_pairs};
}
1;
avg_line_length: 19.792593 | max_line_length: 145 | alphanum_fraction: 0.668413
hexsha: ed0e02bb5f349ca5cb23431447dab3b65efb4011 | size: 550 | ext: pl | lang: Perl
max_stars:  package-updates/log_parser.pl | vsilvar/webmin @ 9c736481d7d07eb4c8dcdb2cacff2365b74efbe0 | ["BSD-3-Clause"] | count: 2 | events: 2019-10-17T18:00:03.000Z .. 2019-10-17T18:02:31.000Z
max_issues: package-updates/log_parser.pl | vsilvar/webmin @ 9c736481d7d07eb4c8dcdb2cacff2365b74efbe0 | ["BSD-3-Clause"] | count: 3 | events: 2020-04-30T14:00:11.000Z .. 2021-05-10T23:28:17.000Z
max_forks:  package-updates/log_parser.pl | vsilvar/webmin @ 9c736481d7d07eb4c8dcdb2cacff2365b74efbe0 | ["BSD-3-Clause"] | count: 13 | events: 2017-09-25T21:59:36.000Z .. 2019-06-18T14:31:57.000Z
content:
# log_parser.pl
# Functions for parsing this module's logs
do 'package-updates-lib.pl';
# parse_webmin_log(user, script, action, type, object, &params)
# Converts logged information from this module into human-readable form
sub parse_webmin_log
{
my ($user, $script, $action, $type, $object, $p) = @_;
if ($action eq 'update') {
return &text('log_update', $object);
}
elsif ($action eq 'sched') {
return $text{$object ? 'log_sched' : 'log_unsched'};
}
elsif ($action eq 'refresh') {
return $text{'log_refresh'};
}
else {
return undef;
}
}
avg_line_length: 22 | max_line_length: 71 | alphanum_fraction: 0.683636
hexsha: ed048d09700481706bf6609a7244484d7179ad0b | size: 8,024 | ext: al | lang: Perl
max_stars:  benchmark/benchmarks/FASP-benchmarks/data/planar-triangulations-2/triangulation-0430-430-1712.al | krzysg/FaspHeuristic @ 1929c40e3fbc49e68b04acfc5522539a18758031 | ["MIT"] | count: null | events: null .. null
max_issues: benchmark/benchmarks/FASP-benchmarks/data/planar-triangulations-2/triangulation-0430-430-1712.al | krzysg/FaspHeuristic @ 1929c40e3fbc49e68b04acfc5522539a18758031 | ["MIT"] | count: null | events: null .. null
max_forks:  benchmark/benchmarks/FASP-benchmarks/data/planar-triangulations-2/triangulation-0430-430-1712.al | krzysg/FaspHeuristic @ 1929c40e3fbc49e68b04acfc5522539a18758031 | ["MIT"] | count: null | events: null .. null
content:
1 291 414
2 37 63 171 249 316
3 26 374 389 397 405
4 215 252 333 359
5 78 251 281 332
6 16 194 391 396
7 223 272 310 386 419
8 150 307 367
9 6 194 348 391
10 2 245 316 372 419
11 56 166 281 295
12 19 199 375
13 25 30 139 140 160 240 244
14 181 256 341 409
15 164 246 286 311 364
16 6 80 396
17 57 142 430
18 43 302 305 314
19 12 24 207 375
20 221 292 337 380
21 93 392 418
22 74 263 318 392
23 74 80 155 186 247
24 104 113 207 297
25 13 140 162 346 420
26 374 389
27 88 107 237
28 39 103 340 360 401
29 136 160 175 240
30 13 29 73 240 361
31 12 342 350 421
32 250 288 399
33 5 166 251 281
34 28 39 269 340
35 45 49 161 278 290 415
36 261 267 313
37 2 63 289 323 372 419
38 123 303 362
39 198 209 269
40 109 111 128 144 229 248 270 320
41 151 208
42 27 37 205 289 323
43 18 208 302 353 371 391
44 112 327
45 161 290 401
46 58 242
47 149 152 177 249
48 54 298
49 51 103 191 307 415
50 36 252
51 8 49 92
52 21 97 259 418
53 11 238 350
54 19 91 298
55 105 331 348 367 391
56 53 238
57 17 337 382
58 46 77 163 214 242 362
59 6 137 194 230 396
60 187
61 295 400
62 111 344
63 2 37
64 138 167
65 38 125 204 237 362 393 413
66 144 246 311
67 138 227 309 315
68 44 116 127 327
69 217 264 311 389
70 101 153
71 95 145 212
72 116 168 196
73 139 173 241 325 358
74 318 377 392
75 206 278 286 290 339 345 363 365
76 62 109 144 344
77 123 200 262 343
78 11 56 238 281 407
79 144 206 340 345
80 16 247 318 371 396
81 304 381
82 87 388 411 425
83 188 218 344 370 417
84 8 39 331 360
85 82 87 388
86 156 282 422
87 304 328 329 411
88 27 197 296 347
89 41 71 95 151 208 212 254 408
90 83 188 231 417
91 48 414
92 51 356
93 21 392 403
94 182 211 385
95 71 89 145 287 408 416
96 8 51 92 150 194 349
97 93 213 259 321 388 403 425
98 32 36 50 250 252 284 288 313 394
99 118 216 225 242 264 312 384
100 200 210 404
101 70 153 202
102 389 405
103 28 161 360 401
104 207 297 325 358 395 429
105 41 55 198 208 391 427
106 20 86 380 422
107 197 237 393 426
108 201 338 384
109 40 111 144
110 247 253 288 302 394 399
111 40 62 76 109 270 379
112 44 68 116 119 177 249
113 195 297 378 398 406
114 182
115 328
116 68 112 177 402
117 70 141 146 202 415
118 69 225 264 343
119 44 112 171 249 368
120 72 127 168 285
121 17 125 303 404 413
122 79 83 218 340 344
123 58 77 200 303 362
124 148 185 241 294 354
125 65 204 413
126 162 228 257 366 420
127 68 72 116 285 327
128 130 248 320
129 92 96 166 349 410
130 128 262 279 320 343
131 159 256 265 315 324
132 57 111 270 379 382
133 60 156 287 380
134 301 319
135 53 179 189
136 117 174 202 373
137 22 59 239 263 332 396
138 167 220 232 309 380
139 30 73 154 244 358
140 13 280 338
141 35 117 226 278
142 17 121 270 404
143 70 117 153
144 79 109 122 246 248 344
145 157 187 212 416
146 117 143 415 428
147 181 324 351 368
148 124 134 185 189 319 424
149 47 152 158 245 291 310
150 8 9 194 367
151 89 105
152 47 233 402
153 61 70 143 295 301 400
154 139 162 195 346 358
155 23 74 169 377
156 258
157 145 187 423
158 47 149 245
159 94 211 368
160 13 29 140 201 240
161 35 45 49 103 401
162 126 346 420
163 46 77 225
164 69 286 311
165 34 188 269 340
166 275 281 410
167 138 232 243 383 409
168 196 308
169 388
170 36 50 193 261
171 2 63 147 249
172 13 25 154 244 346
173 30 185 241
174 117 136 373
175 29 397 405
176 87 328
177 152
178 7 19 24 54 91 310 378
179 11 53 295 319
180 84 331 367
181 14 63 322 341 351
182 94 157 273 309 385
183 299 350 395 429
184 232 256 265 383
185 30 134 173 361
186 155 399
187 145 157 416 423
188 83 90 269
189 135 179 319
190 17 121 221 243
191 49 51 356 415
192 121 125 190 204
193 170 260
194 6 9 59 96 230
195 236 297 357 358
196 72 81 304 308 381
197 88 107 268 347 352 426
198 55 84 105 209 331 427
199 12 342 375
200 100 123 262 303
201 29 108 140 175 338 405
202 29 30 70 101 136 361
203 132 234 258 382 422
204 42 65 125 237 289
205 27 42 88 296
206 75 79 286 345
207 12 24 31 395
208 41 89 212 314
209 39 269 427
210 200 229 262 404
211 94 159 193 260
212 71 89 145 208 253 314 359
213 81 274 298 321 425
214 58 362 393
215 157 170 182 252 385
216 242 257 420
217 248 279
218 165 188 340
219 62 83 258 344 379
220 20 64 243
221 20 220 243 430
222 231 283 287 417
223 272 406
224 32 85 169 250 329
225 46 99 343
226 117 174 175 278 374 397
227 67 131 265
228 236 357 366 376
229 40 262 320
230 96 137 194
231 90 188 222 269 300 417 427
232 138 184 265 383
233 72 149 235 291 402
234 132 203 258
235 72 335 381
236 228 296 323
237 27 42 107 204 393
238 53 259
239 255 263
240 30 160
241 73 185 294
242 46 99 214 312
243 64 190 220
244 154
245 158 316
246 15 66 79 206 286 364
247 110 186 353 399
248 66 130 144 279 311
249 112 158 177 316
250 98 284 329 330
251 5 33 275 332 349
252 4 170 215 334
253 110 212 305 314 359
254 89 151 408
255 52 239 259 263 332
256 131 184 265
257 214 216 242 420 426
258 156 203 219 234 287 370 422
259 52 238 255 277 321 326 342
260 170 193 211 368
261 36 127 170 260 267 285 327 368
262 200 229 320 343
263 22 52 239 276 418
264 387 389
265 67 138
266 192 204 289 341
267 36 115 261 313
268 27 107 352
269 39 165 231
270 17 40 57 132 210 229 404
271 4 253
272 378 398 406
273 114 309
274 81 213 235
275 33 129 349
276 22 418
277 54 298 342 375 412
278 75 226 290 363
279 69 343
280 25 140 336 338
281 78
282 86 133 156
283 95 222 287 408
284 98 313 330
285 120 127 267 293
286 246 365
287 95 156 222 258 283 370 417
288 32 98 110 394 399
289 42 63 204 341
290 35 45 75 401
291 149 235 310
292 20 106 337 422
293 115 267 285
294 325
295 11 179 319 400
296 88 228 236 323 376
297 113 195 358
298 48 54 335 412
299 135 183 189 424
300 151 222 231 283 408 427
301 101 153 202 295 319 361
302 43 110 305 353
303 100 123 200 404
304 81 308 411 425
305 18 110 253
306 190 192 243 355 409
307 8 49 51 84 103 360
308 87 120 168 196 285 293 304
309 94 315 317
310 1 149 414
311 15 164 217 248 364
312 46 99 225
313 36 115 267
314 43 208 253 305
315 67 94 131 159 227 309
316 158 249
317 114 273 369 380 423
318 22 23 74 80
319 134 189 301
320 40 128 262
321 97 213 259 412
322 14 147 256
323 37 205 223 236 406 419
324 147 159 256 322
325 73 241 354 358
326 78 238 255 259 407
327 368
328 115 293 308 330
329 85 87 176 224 330
330 115 176 313 328 329
331 84 367
332 137 230 239
333 145 157 215 359
334 4 98 252 253 271 394
335 1 48 213 235 274 291 298 414
336 216 280 420
337 17 57 382
338 201 216 280 336 384
339 28 75 290 345 401
340 79 165 218 339 345
341 14 63 181 266 289 355
342 199 238 277 350 375
343 69 118 163
344 76 144 219
345 206 339
346 162 172
347 296 376
348 9 55 150 367
349 96 129 230 251 275 332
350 31 53 135 183 238 299 342
351 63 147 171 181
352 27 88
353 110 302
354 294 325 424
355 14 192 266
356 51 92 191 410
357 154 236 366
358 104 139 195 297
359 145 271 333
360 28 39 103 307
361 30 134 185 301
362 38 58 65 393
363 226 374
364 66
365 286 363 374 390
366 126 154 162
367 8 84 150 180 348
368 44 147 171 211 261 324 327
369 60 133 380 423
370 219
371 43 80 247 353
372 2
373 29 136 174 175 226
374 3 26 363 390 397
375 12 19 54 199 342
376 126 257 347 426
377 74 93 155 169 392
378 7 24 113 272 398
379 62 234 258
380 20 86 220 282 309 317
381 72 81 235 274
382 132 292 337 422
383 14 167 232 256 409
384 99 102 216 387
385 94 170 182 193 211
386 7 149 245 310 419
387 99 102 264 384 389
388 82 85 93 224 377 425
389 102 264
390 26 69 164 286 365 374 389
391 16 80 105 208 348 371
392 22 74 276 377 418
393 237
394 98 253 288 334
395 31 350 429
396 6 22 59 80 318
397 175 226 405
398
399 155 169 186 224 288
400 11 61 143 146 153 166 295
401 28 161 290 339
402 72 116 177 233
403 21 52
404 121 142 210
405 3 102 108 175 201 384 389 397
406 195 223 236 398
407 5 255 332
408 89 151 254 300
409 243 306 355 383
410 92 129 146 166 191 400 428
411 304 425
412 213 259 277 298
413 38 121 125 303
414 1 48 91 178 310
415 35 117 141 428
416 60 133 287
417 222 231 287 370
418 21 52 276
419 223 245 372
420 25 162 216 280 336
421 12 31 199 342
422 86 156 258 292
423 60 114 182 187 317
424 124 183 189 299 354 429
425 81 82 97 213
426 107 197 214 347 376 393
427 105 151 209 269
428 191 415
429 325 354 395 424
430 20 190 337
avg_line_length: 18.660465 | max_line_length: 35 | alphanum_fraction: 0.733175
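The content of the row above is a plain adjacency list: each line names a vertex followed by the vertices it connects to, and a vertex with no neighbours (here, 398) stands alone. A minimal Perl parsing sketch follows; reading the edges as directed is an assumption based on the FASP (feedback arc set) benchmark naming:

    use strict;
    use warnings;

    my %adj;
    open my $fh, '<', 'triangulation-0430-430-1712.al' or die $!;
    while (my $line = <$fh>) {
        my ($v, @neighbours) = split ' ', $line;
        next unless defined $v;
        $adj{$v} = \@neighbours;   # isolated vertices map to an empty list
    }
    close $fh;
    printf "%d vertices, %d edges\n",
        scalar keys %adj, scalar map { @{$_} } values %adj;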
hexsha: ed795d52357483d85931509db7be98fa78bef21d | size: 5,448 | ext: pm | lang: Perl
max_stars:  lib/Octium/Sked/Timetable/IDFrameSet.pm | aaronpriven/actium @ 08b26b9372a302a1470e8387a8d8a96ed43df399 | ["Artistic-1.0"] | count: 1 | events: 2017-01-30T04:22:33.000Z .. 2017-01-30T04:22:33.000Z
max_issues: lib/Octium/Sked/Timetable/IDFrameSet.pm | aaronpriven/actium @ 08b26b9372a302a1470e8387a8d8a96ed43df399 | ["Artistic-1.0"] | count: 39 | events: 2015-07-10T22:52:17.000Z .. 2020-05-20T03:45:02.000Z
max_forks:  lib/Octium/Sked/Timetable/IDFrameSet.pm | aaronpriven/actium @ 08b26b9372a302a1470e8387a8d8a96ed43df399 | ["Artistic-1.0"] | count: null | events: null .. null
content:
package Octium::Sked::Timetable::IDFrameSet 0.012;
# Actium/O/Sked/Timetable/IDFrameSet.pm
# Moose object representing the frame set (series of one or more frames
# used on a page) for an InDesign timetable
use Actium ('class');
use Octium;
use Octium::Sked::Timetable::IDFrame;
use overload '""' => sub { shift->description };
# overload ### DEP ###
has description => (
isa => 'Str',
is => 'ro',
);
has frames_r => (
traits => ['Array'],
is => 'bare',
isa => 'ArrayRef[Octium::Sked::Timetable::IDFrame]',
required => 1,
init_arg => 'frames',
handles => {
frames => 'elements',
frame => 'get',
frame_count => 'count',
},
);
has compression_level => (
is => 'ro',
isa => 'Int',
default => 0,
);
has height => (
is => 'ro',
isa => 'Int',
required => 1,
);
has is_portrait => (
is => 'ro',
isa => 'Bool',
default => 0,
);
around BUILDARGS ( $orig, $class: @) {
my $params_r = Actium::hashref(@_);
# run through each frame -- if it's not already an object,
# instantiate the appropriate object and place it back in list
return $class->$orig(@_)
unless exists $params_r->{frames}
and Actium::reftype( $params_r->{frames} ) eq 'ARRAY';
my $frames_r = $params_r->{frames};
foreach my $i ( 0 .. $#{$frames_r} ) {
my $frame_r = $frames_r->[$i];
next if blessed($frame_r);
croak 'Frame passed to '
. __PACKAGE__
. '->new must be reference to hash of attribute specifications'
unless Actium::reftype($frame_r) eq 'HASH';
$frames_r->[$i] = Octium::Sked::Timetable::IDFrame->new($frame_r);
}
return $class->$orig($params_r);
} ## tidy end: around BUILDARGS
Actium::immut;
1;
__END__
=head1 NAME
Octium::Sked::Timetable::IDFrameSet - Object representing a set of
InDesign timetable frames
=head1 VERSION
This documentation refers to version 0.002
=head1 SYNOPSIS
use Octium::Sked::Timetable::IDFrameSet;
my $frameset = Octium::Sked::Timetable::IDFrameSet->new(
description => 'Landscape halves',
compression_level => 0,
height => 42,
frames => [
{ widthpair => [ 4, 1 ],
frame_idx => 0,
},
{ widthpair => [ 5, 0 ],
frame_idx => 2,
},
],
);
=head1 DESCRIPTION
Each page of an Actium timetable document in InDesign consists of a
series of text frames that are linked to each other, and to the pages
before and after.
These frames overlap, and the actual text is placed in an appropriate
frame depending on the specific size of the timetable and what other
timetables are placed with it on the same page.
This object represents a set of frames, and contains the frame objects
and the compression level.
=head1 ATTRIBUTES
=over
=item B<description>
An optional text description of this frame (usually something like
"Portrait halves" for two frames representing two halves of a portrait
page). At this point it's not used for anything, but it's convenient to
have a place for it in I<new()> calls.
=item B<is_portrait>
True if this frameset represents a portrait page. Defaults to false.
=item B<frames>
Required during construction, it consists of the frames that make up
the frameset. Frames are described in
L<Octium::Sked::Timetable::IDFrame|Octium::Sked::Timetable::IDFrame>.
In the constructor, it should be passed as an array reference; it will
be returned as a plain list of objects.
If any of the values passed in the I<frames> entry is an unblessed hash
reference, Octium::Sked::Timetable::IDFrameSet will pass it to
Octium::Sked::Timetable::IDFrame->new() and use the result. (So, you
don't have to explicitly create the IDFrame objects; this module will
do it for you.)
=item B<height>
Required during construction, this is the height of these frames in
terms of rows in the table. It should be specified excluding the number
of rows used for the header (line name, direction, days, and timepoint
names).
=item B<compression_level>
An integer, it represents the amount of shrinkage this timetable will
be subjected to. Compression level 0 is full size; compression level 1
is smaller; compression level 2 is smaller yet; etc.
The idea is that timetables that are small can be printed with bigger
type or with bigger table cells, while timetables that are large might
need to be shrunk ("compressed") to fit on a page. The various IDFrame
objects are designed to allow different sizes to be used in different
circumstances.
=back
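A short sketch of reading those attributes back, using only the accessors and array-trait handles defined in the class above (the object also stringifies to its description, via overload, when one was supplied):

    my @frames = $frameset->frames;      # plain list of IDFrame objects
    my $first  = $frameset->frame(0);
    printf "%s: %d frames, %d rows, compression level %d\n",
        "$frameset", $frameset->frame_count,
        $frameset->height, $frameset->compression_level;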
=head1 DEPENDENCIES
=over
=item Perl 5.016
=item Moose
=item MooseX::StrictConstructor
=item namespace::autoclean
=item Scalar::Util
=item Octium::Sked::Timetable::IDFrame
=back
=head1 AUTHOR
Aaron Priven <[email protected]>
=head1 COPYRIGHT & LICENSE
Copyright 2013
This program is free software; you can redistribute it and/or modify it
under the terms of either:
=over 4
=item * the GNU General Public License as published by the Free
Software Foundation; either version 1, or (at your option) any
later version, or
=item * the Artistic License version 2.0.
=back
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
avg_line_length: 24.430493 | max_line_length: 74 | alphanum_fraction: 0.676762
hexsha: ed4813f3f2dec7482f4cfbf5689b71c0ec8fc794 | size: 4,437 | ext: al | lang: Perl
max_stars:  AddOns/OIOUBL/app/src/Reminder/REP13632.CreateElectronicReminders.al | Ema-Falamas/ALAppExtensions @ f9ee74c1e06c7382b1f8ff74a0e48ac3781646f6 | ["MIT"] | count: null | events: null .. null
max_issues: AddOns/OIOUBL/app/src/Reminder/REP13632.CreateElectronicReminders.al | Ema-Falamas/ALAppExtensions @ f9ee74c1e06c7382b1f8ff74a0e48ac3781646f6 | ["MIT"] | count: null | events: null .. null
max_forks:  AddOns/OIOUBL/app/src/Reminder/REP13632.CreateElectronicReminders.al | Ema-Falamas/ALAppExtensions @ f9ee74c1e06c7382b1f8ff74a0e48ac3781646f6 | ["MIT"] | count: null | events: null .. null
content:
// ------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// ------------------------------------------------------------------------------------------------
report 13632 "OIOUBL-Create Elec. Reminders"
{
Caption = 'Create Electronic Reminders';
ProcessingOnly = true;
dataset
{
dataitem("Issued Reminder Header"; "Issued Reminder Header")
{
DataItemTableView = SORTING ("No.");
RequestFilterFields = "No.", "Customer No.", "OIOUBL-GLN", "OIOUBL-Electronic Reminder Created";
trigger OnAfterGetRecord();
begin
CODEUNIT.RUN(CODEUNIT::"OIOUBL-Export Issued Reminder", "Issued Reminder Header");
if LogInteraction then
SegManagement.LogDocument(
8, "No.", 0, 0, DATABASE::Customer, "Customer No.", '', '', "Posting Description", '');
COMMIT();
Counter := Counter + 1;
end;
trigger OnPostDataItem();
begin
MESSAGE(SuccessMsg, Counter);
end;
trigger OnPreDataItem();
var
IssuedReminderHeader: Record 297;
begin
Counter := 0;
// Any electronic reminders?
IssuedReminderHeader.COPY("Issued Reminder Header");
IssuedReminderHeader.FILTERGROUP(8);
IssuedReminderHeader.SETFILTER("OIOUBL-GLN", '<>%1', '');
if NOT IssuedReminderHeader.FINDFIRST() then
ERROR(NothingToCreateErr);
// All electronic reminders?
IssuedReminderHeader.SETRANGE("OIOUBL-GLN", '');
if IssuedReminderHeader.FINDFIRST() then
if NOT CONFIRM(DocumentsWillBeSkippedQst, TRUE) then
CurrReport.QUIT();
IssuedReminderHeader.SETRANGE("OIOUBL-GLN");
// Some already sent?
IssuedReminderHeader.SETRANGE("OIOUBL-Electronic Reminder Created", TRUE);
if IssuedReminderHeader.FINDFIRST() then
if NOT CONFIRM(DocumentAlreadyCreatedQst, TRUE) then
CurrReport.QUIT();
SETFILTER("OIOUBL-GLN", '<>%1', '');
end;
}
}
requestpage
{
layout
{
area(content)
{
group("")
{
Caption = 'Options';
field(LogInteraction; LogInteraction)
{
ApplicationArea = Basic, Suite;
Caption = 'Log Interaction';
Enabled = LogInteractionEnable;
Tooltip = 'Specifies if you want to record the related interactions with the involved contact person in the Interaction Log Entry table.';
}
}
}
}
actions
{
}
trigger OnInit();
begin
LogInteractionEnable := TRUE;
end;
trigger OnOpenPage();
begin
InitLogInteraction();
LogInteractionEnable := LogInteraction;
end;
}
labels
{
}
trigger OnPreReport();
begin
if NOT CurrReport.USEREQUESTPAGE() then
InitLogInteraction();
end;
var
SegManagement: Codeunit 5051;
Counter: Integer;
LogInteraction: Boolean;
DocumentsWillBeSkippedQst: Label 'One or more issued reminders that match your filter criteria are not electronic reminders and will be skipped.\\Do you want to continue?';
DocumentAlreadyCreatedQst: Label 'One or more electronic reminders that match your filter criteria have been created before.\\Do you want to continue?';
SuccessMsg: Label 'Successfully created %1 electronic reminders.', Comment = '%1 = amount of electronic reminders';
NothingToCreateErr: Label 'There is nothing to create.';
[InDataSet]
LogInteractionEnable: Boolean;
procedure InitLogInteraction();
begin
LogInteraction := SegManagement.FindInteractTmplCode(8) <> '';
end;
}
| 34.130769 | 180 | 0.52513 |
ed4a1c1484b379ede94c8a9e171ac373d8342f43
| 992 |
pl
|
Perl
|
hashdecoder.pl
|
mutr0l/HashDecrypterCrypter-by-mutr0l
|
36d5ac0b54548ffd3b345ce94d66ad70c0cb33ea
|
[
"Apache-2.0"
] | null | null | null |
hashdecoder.pl
|
mutr0l/HashDecrypterCrypter-by-mutr0l
|
36d5ac0b54548ffd3b345ce94d66ad70c0cb33ea
|
[
"Apache-2.0"
] | null | null | null |
hashdecoder.pl
|
mutr0l/HashDecrypterCrypter-by-mutr0l
|
36d5ac0b54548ffd3b345ce94d66ad70c0cb33ea
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/perl
#coded by mutr0l
#hash decoder script
#API md5.darkbyte.ru
print <<banner;
███╗ ███╗██████╗ ███████╗██╗ ██╗ █████╗ ███████╗██╗ ██╗
████╗ ████║██╔══██╗██╔════╝██║ ██║██╔══██╗██╔════╝██║ ██║
██╔████╔██║██║ ██║███████╗███████║███████║███████╗███████║
██║╚██╔╝██║██║ ██║╚════██║██╔══██║██╔══██║╚════██║██╔══██║
██║ ╚═╝ ██║██████╔╝███████║██║ ██║██║ ██║███████║██║ ██║
╚═╝ ╚═╝╚═════╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝
By mutr0l
banner
use LWP::UserAgent;
use HTTP::Response;
$hash=$ARGV[0] or
die "Use:\n".
"Criptografar: hashdecoder.pl <nome>\n".
"Descriptografar: hashdecoder.pl <hashmd5>\n";
if(api){
# API do md5.darkbyte.ru #
$agente = LWP::UserAgent->new();
$dec = $agente->get("http://md5.darkbyte.ru/api.php?q=$hash");
print "[API] resultado: \t\t".$dec->content()."\n";
}
| 28.342857 | 76 | 0.313508 |
ed63f1f5934718d04323cb92d958d42d2391ba63
| 14,194 |
t
|
Perl
|
t/020-experiment.t
|
MadcapJake/Test-Lab
|
426512b31da69642145517ac2475050c1fa8f7ab
|
[
"Artistic-2.0"
] | 5 |
2016-02-17T17:01:24.000Z
|
2021-01-05T15:48:15.000Z
|
t/020-experiment.t
|
MadcapJake/Test-Lab
|
426512b31da69642145517ac2475050c1fa8f7ab
|
[
"Artistic-2.0"
] | 3 |
2016-02-21T04:16:25.000Z
|
2016-02-22T21:18:19.000Z
|
t/020-experiment.t
|
MadcapJake/Test-Lab
|
426512b31da69642145517ac2475050c1fa8f7ab
|
[
"Artistic-2.0"
] | 5 |
2016-02-20T19:51:39.000Z
|
2020-01-10T08:51:31.000Z
|
use v6;
use Test;
use lib 'lib';
use Test::Lab::Experiment;
use Test::Lab::Result;
use Test::Lab::Errors;
class Fake is Test::Lab::Experiment {
has $.published-result;
has @!exceptions;
method exceptions { @!exceptions }
method died($operation, Exception $exception) {
@!exceptions.push: ($operation, $exception);
}
method is-enabled { True }
method publish(Test::Lab::Result $result) { $!published-result = $result }
}
subtest {
subtest {
my $ex = Test::Lab::Experiment.new(:name<hello>);
isa-ok $ex, Test::Lab::Experiment, 'uses builtin defaults';
is $ex.name, "hello", "default name properly set";
}, 'has a default implementation';
is Fake.new.name, "experiment", "properly defaults to 'experiment'";
subtest {
plan 2;
my $ex = Fake.new();
try {
$ex.run;
CATCH {
when X::BehaviorMissing {
pass 'properly throws BehaviorMissing exception';
is 'control', $_.name, 'the missing behavior is the control';
}
}
}
}, "can't be run without a control behavior";
{
my $ex = Fake.new();
$ex.use: { 'control' }
is 'control', $ex.run, 'is a straight pass-through with only a control behavior'
}
{
my $ex = Fake.new();
$ex.use: { 'control' }
$ex.try: { 'candidate' }
  is 'control', $ex.run, 'runs other behaviors but always returns the control';
}
subtest {
plan 3;
my $ex = Fake.new();
$ex.use: { 'control' }
try {
CATCH {
when X::BehaviorNotUnique {
pass 'caught duplicate control block';
is $ex, $_.experiment, 'exception has the experiment';
is 'control', $_.name, 'exception has the name';
}
default { flunk 'did not return correct Exception' }
}
$ex.use: { 'control-again' }
flunk 'Did not throw error on duplicate control block';
}
}, 'complains about duplicate behavior names';
{
my $ex = Fake.new;
$ex.use: { 'control' }
$ex.try: { die 'candidate' }
is 'control', $ex.run, 'swallows exceptions thrown by candidate behaviors';
}
{
my $ex = Fake.new;
$ex.use: { die 'control' }
$ex.try: { 'candidate' }
try {
$ex.run;
CATCH {
default {
is 'control', $_.message,
'passes through exceptions thrown by the control behavior' }
}
}
}
=begin TakesLong
subtest {
plan 1;
my $ex = Fake.new;
my ($last, @runs);
$ex.use: { $last = 'control' }
$ex.try: { $last = 'candidate' }
for ^1000 { $ex.run; @runs.push: $last }
ok @runs.unique.elems > 1;
}, 'shuffles behaviors before running';
=end TakesLong
subtest {
plan 3;
my $ex = Test::Lab::Experiment.new(:name<hello>);
isa-ok $ex, Test::Lab::Experiment;
my role Boom { method publish($result) { die 'boomtown' } }
$ex = $ex but Boom;
$ex.use: { 'control' }
$ex.try: { 'candidate' }
try {
$ex.run;
CATCH {
when X::AdHoc {
pass 'adhoc error thrown';
is 'boomtown', $_.message
}
}
flunk 'never threw boomtown error';
}
}, 're-throws exceptions thrown during publish by default';
subtest {
plan 3;
my $ex = Fake.new;
my role Boom { method publish($result) { die 'boomtown' } }
$ex = $ex but Boom;
$ex.use: { 'control' }
$ex.try: { 'candidate' }
is 'control', $ex.run;
my (\op, \exception) = $ex.exceptions.pop;
is 'publish', op;
is 'boomtown', exception.message;
}, 'reports publishing errors';
subtest {
plan 2;
my $ex = Fake.new;
$ex.use: { 1 }
$ex.try: { 1 }
is 1, $ex.run;
ok $ex.published-result.defined;
}, 'publishes results';
subtest {
plan 2;
my $ex = Fake.new;
$ex.use: { 1 }
is 1, $ex.run;
nok $ex.published-result;
}, 'does not publish results when there is only a control value';
subtest {
plan 2;
my Fake $ex .= new;
$ex.comparator = -> $a, $b { $a ~~ $b }
$ex.use: { '1' }
$ex.try: { 1 }
is '1', $ex.run;
ok $ex.published-result.is-matched;
}, 'compares results with a comparator block if provided';
subtest {
plan 2;
my Fake $experiment .= new;
my Test::Lab::Observation $a .= new :name('a') :$experiment :block({ 1 });
my Test::Lab::Observation $b .= new :name('b') :$experiment :block({ 2 });
ok $experiment.obs-are-equiv($a, $a);
nok $experiment.obs-are-equiv($a, $b);
}, 'knows how to compare two experiments';
{
my Fake $experiment .= new;
my Test::Lab::Observation $a .= new :name('a') :$experiment :block({ '1' });
my Test::Lab::Observation $b .= new :name('b') :$experiment :block({ 1 });
$experiment.comparator = -> $a, $b { $a ~~ $b };
ok $experiment.obs-are-equiv($a, $b),
'uses a compare block to determine if observations are equivalent';
}
subtest {
plan 3;
my Fake $experiment .= new;
$experiment.comparator = -> $a, $b { die 'boomtown' }
$experiment.use: { 'control' }
$experiment.try: { 'candidate' }
is 'control', $experiment.run;
my (\op, \ex) = $experiment.exceptions.pop;
is 'compare', op;
is 'boomtown', ex.message;
}, 'reports errors in a compare block';
subtest {
plan 3;
my Fake $experiment .= new;
my role EnabledError { method is-enabled { die 'kaboom' } };
$experiment = $experiment but EnabledError;
$experiment.use: { 'control' }
$experiment.try: { 'candidate' }
is 'control', $experiment.run;
my (\op, \ex) = $experiment.exceptions.pop;
is 'enabled', op;
is 'kaboom', ex.message;
}, 'reports errors in the is-enabled method';
subtest {
plan 3;
my Fake $experiment .= new;
$experiment.run-if = { die 'kaboom' }
$experiment.use: { 'control' }
$experiment.try: { 'candidate' }
is 'control', $experiment.run;
my (\op, \ex) = $experiment.exceptions.pop;
is 'run-if', op;
is 'kaboom', ex.message;
}, 'reports errors in a run-if block';
{
my Fake $experiment .= new;
is $experiment.clean-value(10), 10, 'returns the given value when no clean block is configured';
}
{
my Fake $experiment .= new;
$experiment.cleaner = { .uc }
is $experiment.clean-value('test'), 'TEST',
'calls the configured clean routine with a value when configured';
}
subtest {
plan 4;
my Fake $experiment .= new;
$experiment.cleaner = -> $value { die 'kaboom' }
$experiment.use: { 'control' }
$experiment.try: { 'candidate' }
is $experiment.run, 'control';
is $experiment.published-result.control.cleaned-value, 'control';
my (\op, \ex) = $experiment.exceptions.pop;
is op, 'clean';
is ex.message, 'kaboom';
  }, 'reports an error and returns the original value when an ' ~
  'error is raised in a clean block';
}, 'Test::Lab::Experiment';
subtest {
{
my ($candidate-ran, $run-check-ran) = False xx 2;
my Fake $experiment .= new;
$experiment.use: { 1 }
$experiment.try: { $candidate-ran = True; 1 }
$experiment.run-if = { $run-check-ran = True; False }
$experiment.run;
ok $run-check-ran, 'run-if is properly called';
nok $candidate-ran, 'does not run the experiment if run-if returns false';
}
{
my ($candidate-ran, $run-check-ran) = False xx 2;
my Fake $experiment .= new;
$experiment.use: { True }
$experiment.try: { $candidate-ran = True }
$experiment.run-if = { $run-check-ran = True }
$experiment.run;
ok $run-check-ran, 'run-if is properly called';
ok $candidate-ran, 'runs the experiment if the given block returns true';
}
}, 'Test::Lab::Experiment.run-if';
subtest {
sub prep {
my Fake $experiment .= new;
($experiment,
Test::Lab::Observation.new :name<a> :$experiment :block({ 1 }),
Test::Lab::Observation.new :name<b> :$experiment :block({ 2 }))
}
sub it($behavior, &block) {
my ($*ex, $*a, $*b) = prep();
subtest &block, $behavior;
}
it 'does not ignore an observation if no ignores are configured', {
nok $*ex.ignore-mismatched-obs($*a, $*b);
}
it 'calls a configured ignore block with the given observed values', {
my $c = False;
$*ex.ignore: -> $a, $b {
is $*a.value, $a;
is $*b.value, $b;
$c = True;
}
ok $*ex.ignore-mismatched-obs($*a, $*b);
ok $c;
}
it 'calls multiple ignore blocks to see if any match', {
my ($called-one, $called-two, $called-three) = False xx 3;
$*ex.ignore: -> $a, $b { $called-one = True; False }
$*ex.ignore: -> $a, $b { $called-two = True; False }
$*ex.ignore: -> $a, $b { $called-three = True; False }
nok $*ex.ignore-mismatched-obs($*a, $*b);
ok $called-one;
ok $called-two;
ok $called-three;
}
it "only calls ignore blocks until one matches", {
my ($called-one, $called-two, $called-three) = False xx 3;
$*ex.ignore: -> $a, $b { $called-one = True; False }
$*ex.ignore: -> $a, $b { $called-two = True; True }
$*ex.ignore: -> $a, $b { $called-three = True; False }
ok $*ex.ignore-mismatched-obs: $*a, $*b;
ok $called-one;
ok $called-two;
nok $called-three;
}
it 'reports exceptions raised in an ignore block and returns false', {
$*ex.ignore: -> $a, $b { die 'kaboom' }
nok $*ex.ignore-mismatched-obs($*a, $*b);
my (\op, \exception) = $*ex.exceptions.pop;
is op, 'ignore';
is exception.message, 'kaboom';
}
it 'skips ignore blocks that throw and tests any remaining ' ~
     'blocks if an exception is swallowed', {
$*ex.ignore: -> $a, $b { die 'kaboom' }
$*ex.ignore: -> $a, $b { True }
ok $*ex.ignore-mismatched-obs($*a, $*b);
is $*ex.exceptions.elems, 1;
}
}, 'Test::Lab::Experiment.ignore-mismatched-obs';
subtest {
sub it($behavior, &block) {
my role Dier { has $!throw-on-mismatches }
my Fake $ex .= new;
my $*ex = $ex but Dier;
subtest &block, $behavior;
}
it 'throws when there is a mismatch if throw-on-mismatches ' ~
'is enabled', {
$*ex.throw-on-mismatches: True;
$*ex.use: { 'fine' }
$*ex.try: { 'not fine' }
throws-like { $*ex.run }, X::Test::Lab::Mismatch;
}
it 'doesn\'t throw when there is a mismatch if ' ~
'throw-on-mismatches is disabled', {
plan 2;
$*ex.throw-on-mismatches: False;
$*ex.use: { 'fine' }
$*ex.try: { 'not fine' }
lives-ok { is $*ex.run, 'fine' };
}
it 'throws a Mismatch error if the control raises ' ~
'and candidate doesn\'t', {
plan 1;
$*ex.throw-on-mismatches: True;
$*ex.use: { die 'control' }
$*ex.try: { 'candidate' }
throws-like { $*ex.run }, X::Test::Lab::Mismatch;
}
it 'throws a Mismatch error if the candidate raises ' ~
'and control doesn\'t', {
plan 1;
$*ex.throw-on-mismatches: True;
$*ex.use: { 'control' }
$*ex.try: { die 'candidate' }
throws-like { $*ex.run }, X::Test::Lab::Mismatch;
}
subtest {
it 'throws when there is a mismatch if the experiment ' ~
'instance\'s throw-on-mismatches is enabled', {
Fake.throw-on-mismatches: False;
$*ex.throw-on-mismatches: True;
$*ex.use: { 'fine' }
$*ex.try: { 'not fine' }
throws-like { $*ex.run }, X::Test::Lab::Mismatch;
}
it 'doesn\'t throw when there is a mismatch if the ' ~
'experiment instance\'s throw-on-mismatches is disabled', {
Fake.throw-on-mismatches: True;
$*ex.throw-on-mismatches: False;
$*ex.use: { 'fine' }
$*ex.try: { 'not fine' }
is $*ex.run, 'fine';
}
it 'respects the throw-on-mismatches class variable by default', {
Fake.throw-on-mismatches: False;
$*ex.use: { 'fine' }
$*ex.try: { 'not fine' }
is $*ex.run, 'fine';
Fake.throw-on-mismatches: True;
throws-like { $*ex.run }, X::Test::Lab::Mismatch;
}
}, 'method throw-on-mismatches';
subtest {
sub it($behavior, &block) {
Fake.throw-on-mismatches: True;
my $*ex = Fake.new;
$*ex.use: { 'foo' }
$*ex.try: { 'bar' }
my $*err;
try {
CATCH { default { $*err = $_; subtest &block, $behavior } }
$*ex.run;
}
}
it 'has the name of the experiment', {
is $*err.name, $*ex.name;
}
it 'includes the experiments\' results', {
is $*err.result, $*ex.published-result;
}
it 'formats nicely as a string', {
is $*err.Str, q:to/ERROR/;
experiment experiment observations mismatched:
control:
"foo"
candidate:
"bar"
ERROR
}
it 'includes the backtrace when an observation throws', {
my $mismatch;
my Fake $experiment .= new;
$experiment.use: { 'value' }
$experiment.try: { die 'error' }
try {
CATCH {
when X::Test::Lab::Mismatch {
pass 'X::Test::Lab::Mismatch thrown';
$mismatch = $_;
}
default { flunk 'wrong error thrown' }
}
$experiment.run;
flunk 'no error thrown';
}
my $lines = $mismatch.Str.lines;
is $lines[1], 'control:';
is $lines[2], ' "value"';
is $lines[3], 'candidate:';
is $lines[4], ' X::AdHoc.new(payload => "error")';
like $lines[5], /\s+in\s.+\sat\s.+\sline\s\d+/;
}
}, 'X::Test::Lab::Mismatch';
}, 'throwing on mismatches';
subtest {
subtest {
my Fake $ex .= new;
    my ($cont-ok, $cand-ok, $before) = False xx 3;
$ex.before = { $before = True }
$ex.use: { $cont-ok = $before }
$ex.try: { $cand-ok = $before }
$ex.run;
ok $before, '«before» should have run';
ok $cont-ok, 'control should have run after «before»';
ok $cand-ok, 'candidate should have run after «before»';
}, 'runs when an experiment is enabled';
subtest {
my $before = False;
my Fake $f .= new;
my role FalseEnabled { method is-enabled { False } }
my $ex = $f but FalseEnabled;
$ex.before = { $before = True }
$ex.use: { 'value' }
$ex.try: { 'value' }
$ex.run;
nok $before, '«before» should not have run';
}
}, '«before» block';
done-testing;
| 25.437276 | 100 | 0.564393 |
ed19515a76722ae71d2f3774374357f1913711e1
| 1,394 |
pm
|
Perl
|
lib/Business/EDI/CodeList/DutyOrTaxOrFeeFunctionCodeQualifier.pm
|
atz/Business-EDI
|
27514f11f91bccda85b8b411074d3dddbc8b28b1
|
[
"Artistic-1.0-cl8"
] | 1 |
2015-10-29T13:18:40.000Z
|
2015-10-29T13:18:40.000Z
|
lib/Business/EDI/CodeList/DutyOrTaxOrFeeFunctionCodeQualifier.pm
|
atz/Business-EDI
|
27514f11f91bccda85b8b411074d3dddbc8b28b1
|
[
"Artistic-1.0-cl8"
] | null | null | null |
lib/Business/EDI/CodeList/DutyOrTaxOrFeeFunctionCodeQualifier.pm
|
atz/Business-EDI
|
27514f11f91bccda85b8b411074d3dddbc8b28b1
|
[
"Artistic-1.0-cl8"
] | null | null | null |
package Business::EDI::CodeList::DutyOrTaxOrFeeFunctionCodeQualifier;
use base 'Business::EDI::CodeList';
my $VERSION = 0.02;
sub list_number {5283;}
my $usage = 'B';
# 5283 Duty or tax or fee function code qualifier [B]
# Desc: Code qualifying the function of a duty or tax or fee.
# Repr: an..3
my %code_hash = (
'1' => [ 'Individual duty, tax or fee (Customs item)',
'Individual duty, tax or fee charged on a single Customs item line of the goods declaration (CCC).' ],
'2' => [ 'Total of all duties, taxes and fees (Customs item)',
'Total of all duties, taxes and fees charged on a single Customs item line of the goods declaration (CCC).' ],
'3' => [ 'Total of each duty, tax or fee type (Customs declaration)',
'Total of each duty, tax or fee charged on the goods declaration (CCC).' ],
'4' => [ 'Total of all duties, taxes and fee types (Customs',
'declaration) Total of all duties, taxes and fees charged on the goods declaration (CCC).' ],
'5' => [ 'Customs duty',
'Duties laid down in the Customs tariff to which goods are liable on entering or leaving the Customs territory (CCC).' ],
'6' => [ 'Fee',
'Charge for services rendered.' ],
'7' => [ 'Tax',
'Contribution levied by an authority.' ],
'9' => [ 'Tax related information',
'Code specifying information related to tax.' ],
);
sub get_codes { return \%code_hash; }
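# Illustrative lookup sketch, using only what this module defines:
#   my $codes = Business::EDI::CodeList::DutyOrTaxOrFeeFunctionCodeQualifier->get_codes();
#   print $codes->{'5'}[0];   # prints "Customs duty"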
1;
| 42.242424 | 125 | 0.667145 |
ed0a01c684cc61872c411ef2c08fd076f699db4b
| 4,125 |
pm
|
Perl
|
perl/lib/SCAMP/UnRPC/JsonStore.pm
|
Aceeri/scamp
|
5c55ebab123841d5e15e62b783e91be4e6d186fd
|
[
"MIT"
] | null | null | null |
perl/lib/SCAMP/UnRPC/JsonStore.pm
|
Aceeri/scamp
|
5c55ebab123841d5e15e62b783e91be4e6d186fd
|
[
"MIT"
] | null | null | null |
perl/lib/SCAMP/UnRPC/JsonStore.pm
|
Aceeri/scamp
|
5c55ebab123841d5e15e62b783e91be4e6d186fd
|
[
"MIT"
] | null | null | null |
package UnRPC::JsonStore;
use Moose;
use namespace::autoclean;
use Pod::JSchema;
use JSON;
use JSON::XS;
has 'discovery' => (is => 'ro', isa => 'UnRPC::Discovery', required => 1);
has 'api' => (is => 'ro', lazy_build => 1 );
sub BUILD { $_[0]->api }
sub invoke_action {
my ($self, $request, $action, $params) = @_;
$request->stash( params => $params );
$request->stash( mode => 'jsonstore' );
$request->stash( xactionstr => $params->{xaction} || 'read' );
my $withmeta = $request->stash->{xactionstr} eq 'read';
# not implemented: :LIST fallback, "meta", authz
my $api = $self->api->{ $action->name };
# $self->check_access( $c, $action );
# HACK - this should be just sending a JSON post, but that doesn't work due to an ExtJS bug.
if ( $params->{records} && !ref($params->{records}) ) {
$params->{records} = JSON->new->allow_nonref->decode( $params->{records} );
}
$action->call( $request, $params );
my $response = $request->stash->{response} ||= {};
ref( $response ) eq 'HASH' or die ["Sanity error - response not found"];
my $root = $request->stash->{root} || 'records';
my $data = $response->{$root} ||= [];# || die ["Root '$root' not found"];
$data = [$data] unless ref($data) eq 'ARRAY'; # data is always a list
if (!exists $response->{recordcount}){ # don't override long counts needed for buffered stores
$response->{recordcount} = scalar @{$data};
}
$response->{success} = JSON::true;
if ( $withmeta ){
        my $first = @$data ? $data->[0] : {};
my $fieldlist = $request->stash->{fields} || [ keys %{ $first } ];
$response->{metaData} = {
idProperty => $request->stash->{idProperty} || ( exists( $first->{idx} ) ? 'idx' : 'id'), # evil / lazy
root => $root,
totalProperty => 'recordcount',
successProperty => 'success',
messageProperty => 'message',
fields => $self->_metafields( $api, $fieldlist ),
};
}
return JSON::XS->new->ascii->convert_blessed->encode($response);
}
sub _metafields{
my ($self,$def,$fieldref) = @_;
if ( ! exists $def->{snip} ){
$def->{snip} = undef; # now exists but false
my $schema = $def->{schema} or return $fieldref;
my $ret = $schema->return_schema or return $fieldref;
my $snip = $ret->rawlocate('properties/records/items/properties') || $ret->rawlocate('properties/records/properties');
ref($snip) eq 'HASH' or return $fieldref;
$def->{snip} = $snip;
}
my $snip = $def->{snip} or return $fieldref;
my %gotfields;
my @outfields;
foreach my $name (@$fieldref){
my $fdef = $snip->{$name};
$gotfields{$name} = 1;
if(ref($fdef) eq 'HASH' and $fdef->{type}){
push @outfields, { name => $name, type => $fdef->{type} };
}else{
push @outfields, { name => $name };
}
}
foreach my $name (keys %$snip){
$gotfields{$name} && next;
my $fdef = $snip->{$name};
if(ref($fdef) eq 'HASH' and $fdef->{type}){
push @outfields, { name => $name, type => $fdef->{type} };
}
}
return \@outfields;
}
sub _build_api {
my ($self) = shift;
my %data;
my %xaction_map = map {$_ => 1} qw'create read update destroy';
my $modules = $self->discovery->modules;
foreach my $ns ( keys %$modules ) {
my $mo = $modules->{$ns};
my %jschemas;
my $filename = $mo->name;
$filename =~ s|::|/|g;
$filename .= ".pm";
if ( my $file = $INC{$filename} ){
my $pjs = Pod::JSchema->new( filename => $file );
map { $data{$ns . '.' . $_->name}{schema} = $_->schema } @{ $pjs->methods || [] };
}
}
return \%data;
}
sub get_params {
my ($self, $rq) = @_;
my $body = $rq->request_body;
die $rq->request_error unless defined $body;
return JSON::XS->new->utf8->decode($body);
}
__PACKAGE__->meta->make_immutable;
| 29.676259 | 126 | 0.532121 |
ed65e6d3f1a27a5e4d1179b134b2e2ffb501511a
| 4,665 |
pl
|
Perl
|
fasta/clip_small_leading_contigs.pl
|
SchwarzEM/ems_perl
|
0c20b1fe1d215689ee8db3677b23175bd968841f
|
[
"BSD-3-Clause"
] | 2 |
2021-07-19T09:00:17.000Z
|
2021-08-30T02:45:18.000Z
|
fasta/clip_small_leading_contigs.pl
|
Superboy666/ems_perl
|
ce78eb5c2120566e6e55a786ebd15382cb38736f
|
[
"BSD-3-Clause"
] | null | null | null |
fasta/clip_small_leading_contigs.pl
|
Superboy666/ems_perl
|
ce78eb5c2120566e6e55a786ebd15382cb38736f
|
[
"BSD-3-Clause"
] | 1 |
2021-07-19T09:00:18.000Z
|
2021-07-19T09:00:18.000Z
|
#!/usr/bin/env perl
# clip_small_leading_contigs.pl -- Erich Schwarz <[email protected]>, 7/22/2013.
# Purpose: given a genome assembly with some remaining leading very small contigs, clip scaffolds to remove them, and report the nt shifted for each scaffold.
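# Example invocation (file names are illustrative); the clipped FASTA goes to
# STDOUT and the per-scaffold trim reports go to STDERR:
#   clip_small_leading_contigs.pl --infiles assembly.fa --threshold 200 \
#       > clipped.fa 2> trim_report.log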
use strict;
use warnings;
use Getopt::Long;
use Scalar::Util qw(looks_like_number);
my @infiles = ();
my @input_scaffolds = ();
my $scaffold = q{};
my $threshold = 0;
my $extra_nt = 0;
my $data_ref;
my $help;
GetOptions ( 'infiles=s{,}' => \@infiles,
'threshold=i' => \$threshold,
'help' => \$help, );
if ( $help or (! @infiles) or (! looks_like_number($threshold) ) or ( $threshold != int $threshold ) or ( $threshold <= 0 ) ) {
die "Format: clip_small_leading_contigs\n",
" --infile|-i <input stream/files>\n",
" --threshold|-t [maximum length of leading 5' ACGTacgt contigs to censor (along with first block of scaffolding N; must be positive integer]\n",
" --help|-h [print this message]\n",
;
}
foreach my $infile (@infiles) {
my $INPUT_FILE;
if ($infile eq '-') {
# Special case: get the stdin handle
$INPUT_FILE = *STDIN{IO};
}
else {
# Standard case: open the file
open $INPUT_FILE, '<', $infile or die "Can't open input file $infile. $!\n";
}
while (my $input = <$INPUT_FILE>) {
chomp $input;
if ( $input =~ /\A > (\S+) .*\z/xms ) {
$scaffold = $1;
if ( exists $data_ref->{'scaffold'}->{$scaffold} ) {
die "Redundant sequence name: $scaffold\n";
}
push @input_scaffolds, $scaffold;
# Note that $input includes the starting '>' for a FASTA record line.
$data_ref->{'scaffold'}->{$scaffold}->{'header'} = $input;
}
elsif ( $input =~ /\A > /xms ) {
die "Can't parse input line: $input\n";
}
else {
$data_ref->{'scaffold'}->{$scaffold}->{'seq'} .= $input;
}
}
close $INPUT_FILE or die "Can't close filehandle to input file $infile. $!\n";
}
LOOP: foreach my $scaf (@input_scaffolds) {
my $orig_seq = $data_ref->{'scaffold'}->{$scaf}->{'seq'};
my $orig_header = $data_ref->{'scaffold'}->{$scaf}->{'header'};
my $new_seq = q{};
my $start_acgt = q{};
# Trivial situation. Return $new_seq identical to $orig_seq.
if ( $orig_seq =~ /\A [^Nn]+ \z/xms ) {
$new_seq = $orig_seq;
}
# Non-trivial situation.
# Split into very first ACGTacgt, first N, etc.
# End up with $new_seq in which we censor very first ACGTacgt + first N if very first ACGTacgt is too small.
else {
# Before we start editing, deal with pathological scaffolds that start with N.
# For this script, we *do* want to deal with $orig_seq if it starts with N; though it is still a good idea to have a warning message about it.
# Since $threshold *must* be at least 'A', prepending 1 nt is guaranteed to make a trimmable 5' end for any scaffold that had previously started with N.
if ( $orig_seq =~ /\A ([Nn]) /xms ) {
warn "Pathological scaffold $scaf starts with an N residue; however, it will still be 5'-trimmed of N residues.\n";
$orig_seq = q{A} . $orig_seq;
$extra_nt = 1;
}
# Next, do the trimming.
if ( $orig_seq =~ /\A ([^Nn]+) ([Nn]+) ([^Nn]+ .*) \z/xms ) {
my $first_acgt = $1;
my $n_scaffold = $2;
my $residuum = $3;
# Default:
$new_seq = $orig_seq;
# But if we have a leading small contig, clip, and report the clip:
my $acgt_len = length($first_acgt);
my $trim_len = length($n_scaffold);
$trim_len += $acgt_len;
# Unless the scaffold is pathological, $extra_nt is 0; if it is pathological, $extra_nt is 1.
$trim_len -= $extra_nt;
if ( $acgt_len < $threshold ) {
$new_seq = $residuum;
warn "Trimmed 5' end of $scaf by $trim_len nt; correct gene annotations accordingly.\n";
}
}
else {
die "Failed to parse sequence of $scaf\n";
}
}
# At last, print the header and sequence.
# $orig_header kept the '>', remember.
print "$orig_header\n";
my @output_lines = unpack("a60" x (length($new_seq)/60 + 1), $new_seq);
foreach my $output_line (@output_lines) {
if ($output_line =~ /\S/) {
print "$output_line\n";
}
}
}
| 37.32 | 160 | 0.554126 |
ed0f77d3436b33ae47d95e1e894e57ac6b912aef
| 1,159 |
pm
|
Perl
|
lib/Test2/Compare/Wildcard.pm
|
clayne/Test2-Suite
|
e841c9aa13febf85c56dbc12fe9320cd0b1605e0
|
[
"Artistic-1.0"
] | null | null | null |
lib/Test2/Compare/Wildcard.pm
|
clayne/Test2-Suite
|
e841c9aa13febf85c56dbc12fe9320cd0b1605e0
|
[
"Artistic-1.0"
] | null | null | null |
lib/Test2/Compare/Wildcard.pm
|
clayne/Test2-Suite
|
e841c9aa13febf85c56dbc12fe9320cd0b1605e0
|
[
"Artistic-1.0"
] | null | null | null |
package Test2::Compare::Wildcard;
use strict;
use warnings;
use base 'Test2::Compare::Base';
our $VERSION = '0.000145';
use Test2::Util::HashBase qw/expect/;
use Carp qw/croak/;
sub init {
my $self = shift;
croak "'expect' is a require attribute"
unless exists $self->{+EXPECT};
$self->SUPER::init();
}
1;
__END__
=pod
=encoding UTF-8
=head1 NAME
Test2::Compare::Wildcard - Placeholder check.
=head1 DESCRIPTION
This module is used as a temporary placeholder for values that still need to be
converted. This is necessary to carry forward the filename and line number which
would be lost in the conversion otherwise.
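As a minimal sketch, construction needs only the C<expect> attribute;
C<$unconverted_value> below is a stand-in for whatever still needs
converting:
    my $placeholder = Test2::Compare::Wildcard->new(
        expect => $unconverted_value,
    );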
=head1 SOURCE
The source code repository for Test2-Suite can be found at
F<https://github.com/Test-More/Test2-Suite/>.
=head1 MAINTAINERS
=over 4
=item Chad Granum E<lt>[email protected]<gt>
=back
=head1 AUTHORS
=over 4
=item Chad Granum E<lt>[email protected]<gt>
=back
=head1 COPYRIGHT
Copyright 2018 Chad Granum E<lt>[email protected]<gt>.
This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
See F<http://dev.perl.org/licenses/>
=cut
| 16.557143 | 80 | 0.726488 |
ed47224261b841699c485956e2e1c93faf800025
| 2,191 |
pl
|
Perl
|
util/mkfiles.pl
|
sketch-hq/openssl
|
2dc4be513b1ab59cce3f578488a870124dcff1b8
|
[
"OpenSSL"
] | null | null | null |
util/mkfiles.pl
|
sketch-hq/openssl
|
2dc4be513b1ab59cce3f578488a870124dcff1b8
|
[
"OpenSSL"
] | null | null | null |
util/mkfiles.pl
|
sketch-hq/openssl
|
2dc4be513b1ab59cce3f578488a870124dcff1b8
|
[
"OpenSSL"
] | null | null | null |
#!/usr/local/bin/perl
#
# This is a hacked version of files.pl for systems that can't do a 'make files'.
# Do a perl util/mkminfo.pl >MINFO to build MINFO
# Written by Steve Henson 1999.
# List of directories to process
my @dirs = (
".",
"crypto",
"crypto/md2",
"crypto/md4",
"crypto/md5",
"crypto/sha",
"crypto/mdc2",
"crypto/hmac",
"crypto/ripemd",
"crypto/des",
"crypto/rc2",
"crypto/rc4",
"crypto/rc5",
"crypto/idea",
"crypto/bf",
"crypto/cast",
"crypto/aes",
"crypto/camellia",
"crypto/seed",
"crypto/modes",
"crypto/cmac",
"crypto/bn",
"crypto/rsa",
"crypto/dsa",
"crypto/dso",
"crypto/dh",
"crypto/ec",
"crypto/buffer",
"crypto/bio",
"crypto/stack",
"crypto/lhash",
"crypto/rand",
"crypto/err",
"crypto/objects",
"crypto/evp",
"crypto/asn1",
"crypto/pem",
"crypto/x509",
"crypto/x509v3",
"crypto/cms",
"crypto/conf",
"crypto/jpake",
"crypto/txt_db",
"crypto/pkcs7",
"crypto/pkcs12",
"crypto/comp",
"crypto/engine",
"crypto/ocsp",
"crypto/ui",
#"crypto/store",
"crypto/pqueue",
"crypto/whrlpool",
"crypto/ts",
"crypto/srp",
"crypto/ct",
"crypto/async",
"crypto/chacha",
"crypto/poly1305",
"crypto/kdf",
"ssl",
"apps",
"engines",
"engines/ccgost",
"test",
"tools"
);
%top;
my $fipscanisteronly = 0;
foreach (@dirs) {
next if ($fipscanisteronly && !(-d $_));
&files_dir ($_, "Makefile");
}
exit(0);
sub files_dir
{
my ($dir, $makefile) = @_;
my %sym;
open (IN, "$dir/$makefile") || die "Can't open $dir/$makefile";
my $s="";
while (<IN>)
{
chop;
s/#.*//;
if (/^([^\s=]+)\s*=\s*(.*)$/)
{
$o="";
($s,$b)=($1,$2);
for (;;)
{
if ($b =~ /\\$/)
{
chop($b);
$o.=$b." ";
$b=<IN>;
chop($b);
}
else
{
$o.=$b." ";
last;
}
}
$o =~ s/^\s+//;
$o =~ s/\s+$//;
$o =~ s/\s+/ /g;
$o =~ s/\$[({]([^)}]+)[)}]/$top{$1} or $sym{$1}/ge;
$sym{$s}=($top{$s} or $o);
}
}
print "RELATIVE_DIRECTORY=$dir\n";
foreach (sort keys %sym)
{
print "$_=$sym{$_}\n";
}
if ($dir eq "." && defined($sym{"BUILDENV"}))
{
foreach (split(' ',$sym{"BUILDENV"}))
{
/^(.+)=/;
$top{$1}=$sym{$1};
}
}
print "RELATIVE_DIRECTORY=\n";
close (IN);
if ($dir eq "." && $sym{FIPSCANISTERONLY} eq "y")
{
$fipscanisteronly = 1;
}
}
| 14.320261 | 80 | 0.554085 |
ed66de2bc06e405172c3a76716c1662b18569b15
| 2,283 |
t
|
Perl
|
t/ffi_build__fortran.t
|
plicease/FFI-Platypus-Lang-Fortran
|
50902cfc05cf29ea2c5328ee36cc0ef893230f34
|
[
"Artistic-1.0"
] | 2 |
2015-02-10T12:09:09.000Z
|
2016-10-12T18:11:29.000Z
|
t/ffi_build__fortran.t
|
PerlFFI/FFI-Platypus-Lang-Fortran
|
256c800709c6f90a95f6662d0c8cb1dfdb8d494d
|
[
"Artistic-1.0"
] | 7 |
2015-02-10T21:54:28.000Z
|
2018-08-19T17:27:08.000Z
|
t/ffi_build__fortran.t
|
PerlFFI/FFI-Platypus-Lang-Fortran
|
256c800709c6f90a95f6662d0c8cb1dfdb8d494d
|
[
"Artistic-1.0"
] | null | null | null |
use strict;
use warnings;
use Test::More 0.98;
use lib 't/lib';
use Test::Cleanup;
use FFI::Build;
use FFI::Build::Platform;
use File::Temp qw( tempdir );
use Capture::Tiny qw( capture_merged );
use File::Spec;
use File::Path qw( rmtree );
use FFI::Platypus 1.00;
use File::Glob qw( bsd_glob );
$ENV{FFI_PLATYPUS_DLERROR} = 1;
subtest 'Fortran' => sub {
plan skip_all => 'Test requires Fortran compiler'
unless eval { FFI::Build::Platform->which(FFI::Build::Platform->for) };
plan skip_all => 'Test requires FFI::Platypus::Lang::Fortran'
unless eval { require FFI::Platypus::Lang::Fortran };
my $build = FFI::Build->new('foo',
dir => tempdir( "tmpbuild.XXXXXX", DIR => 'corpus/ffi_build/project-fortran' ),
buildname => "tmpbuild.$$.@{[ time ]}",
verbose => 1,
);
$build->source('corpus/ffi_build/project-fortran/add*.f*');
note "$_" for $build->source;
my($out, $dll, $error) = capture_merged {
my $dll = eval { $build->build };
($dll, $@);
};
ok $error eq '', 'no error';
if($error)
{
diag $out;
return;
}
else
{
note $out;
}
my $ffi = FFI::Platypus->new( api => 1 );
$ffi->lang('Fortran');
$ffi->lib($dll);
my $ok = 1;
$ok &&= is(
eval { $ffi->function( add1 => [ 'integer*', 'integer*' ] => 'integer' )->call(\1,\2) } || diag($@),
3,
'FORTRAN 77',
);
$ok &&= is(
eval { $ffi->function( add2 => [ 'integer*', 'integer*' ] => 'integer' )->call(\1,\2) } || diag($@),
3,
'Fortran 90',
);
$ok &&= is(
eval { $ffi->function( add3 => [ 'integer*', 'integer*' ] => 'integer' )->call(\1,\2) } || diag($@),
3,
'Fortran 95',
);
unless($ok)
{
diag("build output:\n$out");
if(my $nm = FFI::Build::Platform->which('nm'))
{
diag capture_merged {
my @cmd = ('nm', $build->file->path);
print "+ @cmd\n";
system @cmd;
();
};
}
if(my $ldd = FFI::Build::Platform->which('ldd'))
{
diag capture_merged {
my @cmd = ('ldd', $build->file->path);
print "+ @cmd\n";
system @cmd;
();
};
}
}
cleanup(
$build->file->dirname,
File::Spec->catdir(qw( corpus ffi_build project-fortran ), $build->buildname)
);
};
done_testing;
| 21.537736 | 104 | 0.535699 |
ed1429ed67530245634b06940e8a9fe4d4bc0351
| 14,907 |
pm
|
Perl
|
src/perl5/Bio/JBrowse/Cmd/FormatSequences.pm
|
rtylerr/jbrowse1.11-MultiBigWig
|
a1072b672b86263b0a9e0b1a12769421e2e971d3
|
[
"Artistic-2.0"
] | null | null | null |
src/perl5/Bio/JBrowse/Cmd/FormatSequences.pm
|
rtylerr/jbrowse1.11-MultiBigWig
|
a1072b672b86263b0a9e0b1a12769421e2e971d3
|
[
"Artistic-2.0"
] | null | null | null |
src/perl5/Bio/JBrowse/Cmd/FormatSequences.pm
|
rtylerr/jbrowse1.11-MultiBigWig
|
a1072b672b86263b0a9e0b1a12769421e2e971d3
|
[
"Artistic-2.0"
] | null | null | null |
package Bio::JBrowse::Cmd::FormatSequences;
=head1 NAME
Bio::JBrowse::Cmd::FormatSequences - script module to format reference
sequences (backend module for prepare-refseqs.pl)
=cut
use strict;
use warnings;
use base 'Bio::JBrowse::Cmd';
use Pod::Usage ();
use File::Spec::Functions qw/ catfile catdir /;
use File::Path 'mkpath';
use POSIX;
use Bio::JBrowse::JSON;
use JsonFileStorage;
sub option_defaults {(
out => 'data',
chunksize => 20_000,
seqType => 'DNA'
)}
sub option_definitions {(
"out=s",
"conf=s",
"noseq",
"gff=s",
"chunksize=s",
"fasta=s@",
"sizes=s@",
"refs=s",
"reftypes=s",
"compress",
"trackLabel=s",
"seqType=s",
"key=s",
"help|h|?",
"nohash"
)}
sub run {
my ( $self ) = @_;
my $compress = $self->opt('compress');
$self->{storage} = JsonFileStorage->new( $self->opt('out'), $self->opt('compress'), { pretty => 0 } );
Pod::Usage::pod2usage( 'must provide either a --fasta, --sizes, --gff, or --conf option' )
unless $self->opt('gff') || $self->opt('conf') || $self->opt('fasta') || $self->opt('sizes');
{
my $chunkSize = $self->opt('chunksize');
$chunkSize *= 4 if $compress;
$self->{chunkSize} = $chunkSize;
}
my $refs = $self->opt('refs');
if ( $self->opt('fasta') && @{$self->opt('fasta')} ) {
die "--refids not implemented for FASTA files" if defined $self->opt('refids');
$self->exportFASTA( $refs, $self->opt('fasta') );
$self->writeTrackEntry();
}
elsif ( $self->opt('gff') ) {
my $db;
my $gff = $self->opt('gff');
my $gzip = '';
if( $gff =~ /\.gz$/ ) {
$gzip = ':gzip';
}
open my $fh, "<$gzip", $gff or die "$! reading GFF file $gff";
while ( <$fh> ) {
if( /^##FASTA\s*$/i ) {
# start of the sequence block, pass the filehandle to our fasta database
$self->exportFASTA( $refs, [$fh] );
last;
}
elsif( /^>/ ) {
# beginning of implicit sequence block, need to seek
# back
seek $fh, -length($_), SEEK_CUR;
$self->exportFASTA( $refs, [$fh] );
last;
}
}
$self->writeTrackEntry();
} elsif ( $self->opt('conf') ) {
my $config = Bio::JBrowse::JSON->new->decode_file( $self->opt('conf') );
eval "require $config->{db_adaptor}; 1" or die $@;
my $db = eval {$config->{db_adaptor}->new(%{$config->{db_args}})}
or warn $@;
die "Could not open database: $@" unless $db;
if (my $refclass = $config->{'reference class'}) {
eval {$db->default_class($refclass)};
}
$db->strict_bounds_checking(1) if $db->can('strict_bounds_checking');
$self->exportDB( $db, $refs, {} );
$self->writeTrackEntry();
}
elsif( $self->opt('sizes') ) {
my %refseqs;
for my $sizefile ( @{$self->opt('sizes')} ) {
open my $f, '<', $sizefile or warn "$! opening file $sizefile, skipping";
next unless $f;
while( my $line = <$f> ) {
next unless $line =~ /\S/;
chomp $line;
my ( $name, $length ) = split /\s+/,$line,2;
s/^\s+|\s+$//g for $name, $length;
$refseqs{$name} = {
name => $name,
start => 0,
end => $length,
length => $length
};
}
}
$self->writeRefSeqsJSON( \%refseqs );
}
}
sub trackLabel {
my ( $self ) = @_;
# use --trackLabel if given
return $self->opt('trackLabel') if $self->opt('trackLabel');
# otherwise construct from seqType. uppercasing in case it is
# also used as the human-readable name
my $st = $self->opt('seqType');
if( $st =~ /^[dr]na$/i ) {
return uc $st;
}
return lc $st;
}
sub exportFASTA {
my ( $self, $refs, $files ) = @_;
my $accept_ref = sub {1};
if( $refs ) {
        $refs = { map { ($_ => 1) } split /\s*,\s*/, $refs };
$accept_ref = sub { $refs->{$_[0]} };
}
my %refSeqs;
for my $fasta ( @$files ) {
my $gzip = $fasta =~ /\.gz(ip)?$/i ? ':gzip' : '';
my $fasta_fh;
if( ref $fasta ) {
$fasta_fh = $fasta;
} else {
open $fasta_fh, "<$gzip", $fasta or die "$! reading $fasta";
}
my $curr_seq;
my $curr_chunk;
my $chunk_num;
my $noseq = $self->opt('noseq');
my $writechunks = sub {
my $flush = shift;
return if $noseq;
while( $flush && $curr_chunk || length $curr_chunk >= $self->{chunkSize} ) {
$self->openChunkFile( $curr_seq, $chunk_num )
->print(
substr( $curr_chunk, 0, $self->{chunkSize}, '' ) #< shifts off the first part of the string
);
$chunk_num++;
}
};
local $_;
while ( <$fasta_fh> ) {
if ( /^\s*>\s*(\S+)\s*(.*)/ ) {
$writechunks->('flush') if $curr_seq;
if ( $accept_ref->($1) ) {
$chunk_num = 0;
$curr_chunk = '';
$curr_seq = $refSeqs{$1} = {
name => $1,
start => 0,
end => 0,
seqChunkSize => $self->{chunkSize},
$2 ? ( description => $2 ) : ()
};
} else {
undef $curr_seq;
}
} elsif ( $curr_seq && /\S/ ) {
s/[\s\r\n]//g;
$curr_seq->{end} += length;
unless( $noseq ) {
$curr_chunk .= $_;
$writechunks->();
}
}
}
$writechunks->('flush');
}
$self->writeRefSeqsJSON( \%refSeqs );
}
sub exportDB {
my ( $self, $db, $refs, $refseqs ) = @_;
my $compress = $self->opt('compress');
my %refSeqs = %$refseqs;
my %exportedRefSeqs;
my @queries;
if( my $reftypes = $self->opt('reftypes') ) {
if( $db->isa( 'Bio::DB::Das::Chado' ) ) {
die "--reftypes argument not supported when using the Bio::DB::Das::Chado adaptor\n";
}
push @queries, [ -type => [ split /[\s,]+/, $reftypes ] ];
}
if( ! @queries && ! defined $refs && $db->can('seq_ids') ) {
$refs = join ',', $db->seq_ids;
}
if ( defined $refs ) {
for my $ref (split ",", $refs) {
push @queries, [ -name => $ref ];
}
}
my $refCount = 0;
for my $query ( @queries ) {
my @segments = $db->isa('Bio::DB::Das::Chado') ? $db->segment( @$query ) : $db->features( @$query );
unless( @segments ) {
warn "WARNING: Reference sequence with @$query not found in input.\n";
next;
}
for my $seg ( @segments ) {
my $refInfo = {
name => $self->refName($seg),
start => $seg->start - 1,
end => $seg->end,
length => $seg->length
};
if ( $refSeqs{ $refInfo->{name} } ) {
warn "WARNING: multiple reference sequences found named '$refInfo->{name}', using only the first one.\n";
} else {
$refSeqs{ $refInfo->{name} } = $refInfo;
}
unless( $self->opt('noseq') || $exportedRefSeqs{ $refInfo->{name} }++ ) {
$self->exportSeqChunksFromDB( $refInfo, $self->{chunkSize}, $db,
[ -name => $refInfo->{name} ],
$seg->start, $seg->end);
$refSeqs{ $refInfo->{name}}{seqChunkSize} = $self->{chunkSize};
}
}
}
unless( %refSeqs ) {
warn "No reference sequences found, exiting.\n";
exit;
}
$self->writeRefSeqsJSON( \%refSeqs );
}
sub writeRefSeqsJSON {
my ( $self, $refseqs ) = @_;
mkpath( File::Spec->catdir($self->{storage}{outDir},'seq') );
$self->{storage}->modify( 'seq/refSeqs.json',
sub {
#add new ref seqs while keeping the order
#of the existing ref seqs
my $old = shift || [];
my %refs = %$refseqs;
for (my $i = 0; $i < @$old; $i++) {
if( $refs{$old->[$i]->{name}} ) {
$old->[$i] = delete $refs{$old->[$i]->{name}};
}
}
foreach my $name (sort keys %refs) {
if( not exists $refs{$name}{length} ) {
$refs{$name}{length} = $refs{$name}{end} - $refs{$name}{start};
}
push @{$old}, $refs{$name};
}
return $old;
});
if ( $self->opt('compress') ) {
# if we are compressing the sequence files, drop a .htaccess file
# in the seq/ dir that will automatically configure users with
# Apache (and AllowOverride on) to serve the .txt.gz files
# correctly
require GenomeDB;
my $hta = catfile( $self->opt('out'), 'seq', '.htaccess' );
open my $hta_fh, '>', $hta or die "$! writing $hta";
$hta_fh->print( GenomeDB->precompression_htaccess('.txtz','.jsonz') );
}
}
sub writeTrackEntry {
my ( $self ) = @_;
my $compress = $self->opt('compress');
my $seqTrackName = $self->trackLabel;
unless( $self->opt('noseq') ) {
$self->{storage}->touch( 'tracks.conf' );
$self->{storage}->modify( 'trackList.json',
sub {
my $trackList = shift;
unless (defined($trackList)) {
$trackList =
{
'formatVersion' => 1,
'tracks' => []
};
}
my $tracks = $trackList->{'tracks'};
my $i;
for ($i = 0; $i <= $#{$tracks}; $i++) {
last if ($tracks->[$i]->{'label'}
eq
$seqTrackName);
}
$tracks->[$i] =
{
'label' => $seqTrackName,
'key' => $self->opt('key') || 'Reference sequence',
'type' => "SequenceTrack",
'category' => "Reference sequence",
'storeClass' => 'JBrowse/Store/Sequence/StaticChunked',
'chunkSize' => $self->{chunkSize},
'urlTemplate' => $self->seqUrlTemplate,
( $compress ? ( 'compress' => 1 ): () ),
( 'dna' eq lc $self->opt('seqType') ? () : ('showReverseStrand' => 0 ) )
};
return $trackList;
});
}
return;
}
###########################
sub refName {
my ( $self, $seg ) = @_;
my $segName = $seg->name;
$segName = $seg->{'uniquename'} if $seg->{'uniquename'};
$segName =~ s/:.*$//; #get rid of coords if any
return $segName;
}
sub openChunkFile {
my ( $self, $refInfo, $chunkNum ) = @_;
my $compress = $self->opt('compress');
my ( $dir, $file ) = $self->opt('nohash')
# old style
? ( catdir( $self->opt('out'), 'seq',
$refInfo->{name}
),
"$chunkNum.txt"
)
# new hashed structure
: ( catdir( $self->opt('out'), 'seq',
$self->_crc32_path( $refInfo->{name} )
),
"$refInfo->{name}-$chunkNum.txt"
);
$file .= 'z' if $compress;
mkpath( $dir );
open my $fh, '>'.($compress ? ':gzip' : ''), catfile( $dir, $file )
or die "$! writing $file";
return $fh;
}
sub _crc32_path {
my ( $self, $str ) = @_;
my $crc = ( $self->{crc} ||= do { require Digest::Crc32; Digest::Crc32->new } )
->strcrc32( $str );
my $hex = lc sprintf( '%08x', $crc );
return catdir( $hex =~ /(.{1,3})/g );
}
sub seqUrlTemplate {
my ( $self ) = @_;
return $self->opt('nohash')
? "seq/{refseq}/" # old style
: "seq/{refseq_dirpath}/{refseq}-"; # new hashed structure
}
sub exportSeqChunksFromDB {
my ( $self, $refInfo, $chunkSize, $db, $segDef, $start, $end ) = @_;
$start = 1 if $start < 1;
$db->absolute( 1 ) if $db->can('absolute');
my $chunkStart = $start;
while( $chunkStart <= $end ) {
my $chunkEnd = $chunkStart + $chunkSize - 1;
$chunkEnd = $end if $chunkEnd > $end;
my $chunkNum = floor( ($chunkStart - 1) / $chunkSize );
my ($seg) = $db->segment( @$segDef,
-start => $chunkStart,
-end => $chunkEnd,
-absolute => 1,
);
unless( $seg ) {
die "Seq export query failed, please inform the developers of this error"
}
$seg->start == $chunkStart
or die "requested $chunkStart .. $chunkEnd; got " . $seg->start . " .. " . $seg->end;
$chunkStart = $chunkEnd + 1;
next unless $seg && $seg->seq && $seg->seq->seq;
$self->openChunkFile( $refInfo, $chunkNum )
->print( $seg->seq->seq );
}
}
1;
| 32.477124 | 121 | 0.403837 |
73d98f665029eda8a7f5a8ccb428f21a6d7339e9
| 1,729 |
pm
|
Perl
|
auto-lib/Paws/SDB/ReplaceableItem.pm
|
agimenez/aws-sdk-perl
|
9c4dff7d1af2ff0210c28ca44fb9e92bc625712b
|
[
"Apache-2.0"
] | null | null | null |
auto-lib/Paws/SDB/ReplaceableItem.pm
|
agimenez/aws-sdk-perl
|
9c4dff7d1af2ff0210c28ca44fb9e92bc625712b
|
[
"Apache-2.0"
] | null | null | null |
auto-lib/Paws/SDB/ReplaceableItem.pm
|
agimenez/aws-sdk-perl
|
9c4dff7d1af2ff0210c28ca44fb9e92bc625712b
|
[
"Apache-2.0"
] | null | null | null |
package Paws::SDB::ReplaceableItem;
use Moose;
has Attributes => (is => 'ro', isa => 'ArrayRef[Paws::SDB::ReplaceableAttribute]', request_name => 'Attribute', xmlname => 'Attribute', request_name => 'Attribute', traits => ['NameInRequest','Unwrapped','NameInRequest'], required => 1);
has Name => (is => 'ro', isa => 'Str', xmlname => 'ItemName', request_name => 'ItemName', traits => ['Unwrapped','NameInRequest'], required => 1);
1;
### main pod documentation begin ###
=head1 NAME
Paws::SDB::ReplaceableItem
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::SDB::ReplaceableItem object:
$service_obj->Method(Att1 => { Attributes => $value, ..., Name => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::SDB::ReplaceableItem object:
$result = $service_obj->Method(...);
$result->Att1->Attributes
=head1 DESCRIPTION
This class has no description
=head1 ATTRIBUTES
=head2 B<REQUIRED> Attributes => ArrayRef[L<Paws::SDB::ReplaceableAttribute>]
The list of attributes for a replaceable item.
=head2 B<REQUIRED> Name => Str
The name of the replaceable item.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::SDB>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| 27.444444 | 239 | 0.718913 |
73f6c6a82563d5740b39959dd018139a6c255f2a
| 431 |
pl
|
Perl
|
perl/lib/unicore/lib/Nv/1_9.pl
|
JyothsnaMididoddi26/xampp
|
8f34d7fa7c2e6cc37fe4ece5e6886dc4e5c0757b
|
[
"Apache-2.0"
] | 1 |
2017-01-31T08:49:16.000Z
|
2017-01-31T08:49:16.000Z
|
xampp/perl/lib/unicore/lib/Nv/1_9.pl
|
silent88/Biographies-du-Fontenay
|
af4567cb6b78003daa72c37b5ac9f5611a360a9f
|
[
"MIT"
] | 2 |
2020-07-17T00:13:41.000Z
|
2021-05-08T17:01:54.000Z
|
perl/lib/unicore/lib/Nv/1_9.pl
|
Zolhyp/Plan
|
05dbf6a650cd54f855d1731dee70098c5c587339
|
[
"Apache-2.0"
] | null | null | null |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
2151
END
| 30.785714 | 78 | 0.645012 |
73d5e9f64190348d8213862c92adb11d09723b1e
| 748 |
t
|
Perl
|
samples/client/petstore/perl/t/Model200ResponseTest.t
|
meadsteve/openapi-generator
|
e09d1bee43c317a8a856209cfc92ed8789a0c5f5
|
[
"Apache-2.0"
] | 4 |
2021-02-20T21:39:04.000Z
|
2021-08-24T13:54:15.000Z
|
samples/client/petstore/perl/t/Model200ResponseTest.t
|
meadsteve/openapi-generator
|
e09d1bee43c317a8a856209cfc92ed8789a0c5f5
|
[
"Apache-2.0"
] | 27 |
2021-04-07T07:30:36.000Z
|
2022-03-31T04:09:14.000Z
|
samples/client/petstore/perl/t/Model200ResponseTest.t
|
meadsteve/openapi-generator
|
e09d1bee43c317a8a856209cfc92ed8789a0c5f5
|
[
"Apache-2.0"
] | 4 |
2020-12-07T02:43:58.000Z
|
2020-12-07T10:23:39.000Z
|
=begin comment
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by the OpenAPI Generator
# Please update the test cases below to test the model.
# Ref: https://openapi-generator.tech
#
use Test::More tests => 2;
use Test::Exception;
use lib 'lib';
use strict;
use warnings;
use_ok('WWW::OpenAPIClient::Object::Model200Response');
my $instance = WWW::OpenAPIClient::Object::Model200Response->new();
isa_ok($instance, 'WWW::OpenAPIClient::Object::Model200Response');
| 22 | 156 | 0.75 |
ed6a31f50f03a6c2d20cee8cbe2599109b0db327
| 184 |
pm
|
Perl
|
lib/Acme/Flat/AV.pm
|
kentnl/Acme-Flat
|
c69abcb30d58a7a3d53a1def36a3c2bcee256976
|
[
"Artistic-1.0"
] | null | null | null |
lib/Acme/Flat/AV.pm
|
kentnl/Acme-Flat
|
c69abcb30d58a7a3d53a1def36a3c2bcee256976
|
[
"Artistic-1.0"
] | null | null | null |
lib/Acme/Flat/AV.pm
|
kentnl/Acme-Flat
|
c69abcb30d58a7a3d53a1def36a3c2bcee256976
|
[
"Artistic-1.0"
] | null | null | null |
use 5.006;
use strict;
use warnings;
package Acme::Flat::AV;
# ABSTRACT: An Array Value
our $VERSION = '0.001002';
# AUTHORITY
use parent 'Acme::Flat::PVMG';
use Class::Tiny;
1;
| 10.823529 | 30 | 0.673913 |
ed531136fc28d79834eefec1cd8a05906ba0c366
| 5,061 |
pm
|
Perl
|
Slim/Utils/Light.pm
|
yo61/slimserver
|
29c2ed4f8b3bff2b12df76a63d7268cdddf78e8e
|
[
"BSD-3-Clause"
] | null | null | null |
Slim/Utils/Light.pm
|
yo61/slimserver
|
29c2ed4f8b3bff2b12df76a63d7268cdddf78e8e
|
[
"BSD-3-Clause"
] | null | null | null |
Slim/Utils/Light.pm
|
yo61/slimserver
|
29c2ed4f8b3bff2b12df76a63d7268cdddf78e8e
|
[
"BSD-3-Clause"
] | null | null | null |
package Slim::Utils::Light;
# $Id: $
# Logitech Media Server Copyright 2001-2011 Logitech.
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License,
# version 2.
# This module provides some functions compatible with functions
# from the core Logitech Media Server code, without their overhead.
# These functions are called by helper applications like SqueezeTray
# or the control panel.
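# Typical usage from a helper app, as a sketch (the string token below is
# only an example; 'language' is a real pref read elsewhere in this module):
#
#   use Slim::Utils::Light;
#   my $label = string('SOME_TOKEN'); # localised lookup from strings.txt
#   my $lang = getPref('language');   # read a single server preference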
use Exporter::Lite;
@ISA = qw(Exporter);
use Config;
use FindBin qw($Bin);
use File::Spec::Functions qw(catfile catdir);
our @EXPORT = qw(string getPref);
my ($os, $language, %strings, $stringsLoaded);
BEGIN {
my @SlimINC = ();
	# NB: The user may be on a platform whose perl reports a
	# different x86 version than we've supplied - but it may work
	# anyway.
my $arch = $Config::Config{'archname'};
$arch =~ s/^i[3456]86-/i386-/;
$arch =~ s/gnu-//;
my $perlmajorversion = $Config{'version'};
$perlmajorversion =~ s/\.\d+$//;
my $libPath = $Bin;
use Slim::Utils::OSDetect;
Slim::Utils::OSDetect::init();
if (my $libs = Slim::Utils::OSDetect::dirsFor('libpath')) {
# On Debian, RH and SUSE, our CPAN directory is located in the same dir as strings.txt
$libPath = $libs;
};
@SlimINC = (
catdir($libPath,'CPAN','arch',$perlmajorversion, $arch),
catdir($libPath,'CPAN','arch',$perlmajorversion, $arch, 'auto'),
catdir($libPath,'CPAN','arch',$Config{'version'}, $Config::Config{'archname'}),
catdir($libPath,'CPAN','arch',$Config{'version'}, $Config::Config{'archname'}, 'auto'),
catdir($libPath,'CPAN','arch',$perlmajorversion, $Config::Config{'archname'}),
catdir($libPath,'CPAN','arch',$perlmajorversion, $Config::Config{'archname'}, 'auto'),
catdir($libPath,'CPAN','arch',$Config::Config{'archname'}),
catdir($libPath,'lib'),
catdir($libPath,'CPAN'),
$libPath,
);
# This works like 'use lib'
# prepend our directories to @INC so we look there first.
unshift @INC, @SlimINC;
$os = Slim::Utils::OSDetect->getOS();
}
my ($serverPrefFile, $versionFile);
# return localised version of string token
sub string {
my $name = shift;
loadStrings() unless $stringsLoaded;
$language ||= getPref('language') || $os->getSystemLanguage();
my $lang = shift || $language;
my $string = $strings{ $name }->{ $lang } || $strings{ $name }->{ $language } || $strings{ $name }->{'EN'} || $name;
if ( @_ ) {
$string = sprintf( $string, @_ );
}
return $string;
}
sub loadStrings {
my $string = '';
my $language = '';
my $stringname = '';
# server string file
my $file;
# let's see whether this is a PerlApp/Tray compiled executable
if (defined $PerlApp::VERSION) {
$file = PerlApp::extract_bound_file('strings.txt');
}
elsif (defined $PerlTray::VERSION) {
$file = PerlTray::extract_bound_file('strings.txt');
}
# try to find the strings.txt file from our installation
unless ($file && -f $file) {
my $path = $os->dirsFor('strings');
$file = catdir($path, 'strings.txt');
}
open(STRINGS, "<:utf8", $file) || do {
warn "Couldn't open file [$file]!";
return;
};
foreach my $line (<STRINGS>) {
chomp($line);
next if $line =~ /^#/;
next if $line !~ /\S/;
if ($line =~ /^(\S+)$/) {
$stringname = $1;
$string = '';
next;
} elsif ($line =~ /^\t(\S*)\t(.+)$/) {
$language = uc($1);
$string = $2;
$strings{$stringname}->{$language} = $string;
}
}
close STRINGS;
$stringsLoaded = 1;
}
sub setString {
my ($stringname, $string) = @_;
loadStrings() unless $stringsLoaded;
$language ||= getPref('language') || $os->getSystemLanguage();
$strings{$stringname}->{$language} = $string;
}
# Read a pref from the server preference file via YAML::XS - lighter weight
# than loading the full prefs framework. Don't call this too often, it's in
# no way optimized for speed.
sub getPref {
my $pref = shift;
my $prefFile = shift;
if ($prefFile) {
$prefFile = catdir($os->dirsFor('prefs'), 'plugin', $prefFile);
}
else {
$serverPrefFile ||= catfile( scalar($os->dirsFor('prefs')), 'server.prefs' );
$prefFile = $serverPrefFile;
}
require YAML::XS;
my $prefs = eval { YAML::XS::LoadFile($prefFile) };
my $ret;
if (!$@) {
$ret = $prefs->{$pref};
}
# if (-r $prefFile) {
#
# if (open(PREF, $prefFile)) {
#
# local $_;
# while (<PREF>) {
#
# # read YAML (server) and old style prefs (installer)
# if (/^$pref(:| \=)? (.+)$/) {
# $ret = $2;
# $ret =~ s/^['"]//;
# $ret =~ s/['"\s]*$//s;
# last;
# }
# }
#
# close(PREF);
# }
# }
return $ret;
}
sub checkForUpdate {
$versionFile ||= catfile( scalar($os->dirsFor('updates')), 'server.version' );
open(UPDATEFLAG, $versionFile) || return '';
my $installer = '';
local $_;
while ( <UPDATEFLAG> ) {
chomp;
if (/(?:LogitechMediaServer|Squeezebox|SqueezeCenter).*/i) {
$installer = $_;
last;
}
}
close UPDATEFLAG;
return $installer if ($installer && -r $installer);
}
sub resetUpdateCheck {
unlink $versionFile if $versionFile && -r $versionFile;
}
1;
| 22.004348 | 117 | 0.61806 |
ed77cc168af333488bb2aefca870d73167d29f92
| 126 |
t
|
Perl
|
test/error/JBackQuote1.t
|
rewriting/tom
|
2918e95c78006f08a2a0919ef440413fa5c2342a
|
[
"BSD-3-Clause"
] | 36 |
2016-02-19T12:09:49.000Z
|
2022-02-03T13:13:21.000Z
|
test/error/JBackQuote1.t
|
rewriting/tom
|
2918e95c78006f08a2a0919ef440413fa5c2342a
|
[
"BSD-3-Clause"
] | null | null | null |
test/error/JBackQuote1.t
|
rewriting/tom
|
2918e95c78006f08a2a0919ef440413fa5c2342a
|
[
"BSD-3-Clause"
] | 6 |
2017-11-30T17:07:10.000Z
|
2022-03-12T14:46:21.000Z
|
%include{ TNode.tom }
{
`XML(<A>
<B/>
</A>);
`XML(<A at1="foo" at2=x at3=dd("text")/>);
``toto
}
| 11.454545 | 44 | 0.380952 |
ed319d60df501356e88aa8be91a941573a1b5d0e
| 2,529 |
pm
|
Perl
|
src/util/t_array.pm
|
zapfbandit/krb5
|
02f6ef7257c2b2d5a2db3d570944ba3cfa9319ee
|
[
"MIT",
"Unlicense"
] | null | null | null |
src/util/t_array.pm
|
zapfbandit/krb5
|
02f6ef7257c2b2d5a2db3d570944ba3cfa9319ee
|
[
"MIT",
"Unlicense"
] | null | null | null |
src/util/t_array.pm
|
zapfbandit/krb5
|
02f6ef7257c2b2d5a2db3d570944ba3cfa9319ee
|
[
"MIT",
"Unlicense"
] | null | null | null |
package t_array;
use strict;
use vars qw(@ISA);
#require ktemplate;
require t_template;
@ISA=qw(t_template);
my @parms = qw(NAME TYPE);
my %defaults = ( );
my @templatelines = <DATA>;
sub new { # no args
my $self = {};
bless $self;
$self->init(\@parms, \%defaults, \@templatelines);
return $self;
}
__DATA__
/*
* array type, derived from template
*
* parameters:
* NAME: <NAME>
* TYPE: <TYPE>
*
* methods:
* int init() -> nonzero if fail initial allocation
* long size() -> nonnegative number of values allocated
* long max_size() -> largest number of values the type supports
* int grow(newsize) -> negative if fail allocation, memset(,0,) new space
* <TYPE> *getaddr(idx) -> aborts if out of range
* void set(idx, value) -> aborts if out of range
* <TYPE> get(idx) -> value, or aborts if out of range
* void destroy() -> free the element storage
*/
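/*
* Usage sketch for a generated instance; "intarray" and "int" stand in for
* the NAME and TYPE parameters and are not part of this template:
*
*     intarray a;
*     if (intarray_init(&a) != 0)
*         return ENOMEM;
*     if (intarray_grow(&a, 100) < 0)
*         return -1;
*     intarray_set(&a, 42, 7);
*     int v = intarray_get(&a, 42);
*     intarray_destroy(&a);
*/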
#include <stdlib.h>
#include <errno.h>
#include <limits.h>
#include <string.h>
#ifdef HAVE_STDINT_H
# include <stdint.h>
#endif
struct <NAME>__header {
size_t allocated;
<TYPE> *elts;
};
typedef struct <NAME>__header <NAME>;
static inline int
<NAME>_init(<NAME> *arr)
{
arr->elts = calloc(10, sizeof(<TYPE>));
if (arr->elts == NULL)
return ENOMEM;
arr->allocated = 10;
return 0;
}
static inline long
<NAME>_size(<NAME> *arr)
{
return arr->allocated;
}
static inline long
<NAME>_max_size(<NAME> *arr)
{
size_t upper_bound;
upper_bound = SIZE_MAX / sizeof(*arr->elts);
if (upper_bound > LONG_MAX)
upper_bound = LONG_MAX;
return (long) upper_bound;
}
static inline int
<NAME>_grow(<NAME> *arr, unsigned long newcount)
{
size_t oldsize = sizeof(*arr->elts) * arr->allocated;
size_t newsize;
void *ptr;
if (newcount > LONG_MAX)
return -1;
if (newcount < arr->allocated)
return 0;
if (newcount > <NAME>_max_size(arr))
return -1;
newsize = sizeof(*arr->elts) * newcount;
ptr = realloc(arr->elts, newsize);
if (ptr == NULL)
return -1;
memset((char *)ptr + oldsize, 0, newsize - oldsize);
arr->elts = ptr;
arr->allocated = newcount;
return 0;
}
static inline <TYPE> *
<NAME>_getaddr (<NAME> *arr, long idx)
{
if (idx < 0 || idx >= arr->allocated)
abort();
return arr->elts + idx;
}
static inline void
<NAME>_set (<NAME> *arr, long idx, <TYPE> value)
{
<TYPE> *newvalp;
newvalp = <NAME>_getaddr(arr, idx);
*newvalp = value;
}
static inline <TYPE>
<NAME>_get (<NAME> *arr, long idx)
{
return *<NAME>_getaddr(arr, idx);
}
static inline void
<NAME>_destroy (<NAME> *arr)
{
free(arr->elts);
arr->elts = 0;
}
| 19.015038 | 74 | 0.631475 |
ed7b70350f92172ef7ec12da1ea47c364be84e87
| 3,527 |
pm
|
Perl
|
modules/Bio/EnsEMBL/Compara/RunnableDB/GenomicAlignBlock/SetGerpNeutralRate.pm
|
manuelcarbajo/ensembl-compara
|
0ffe653215a20e6921c5f4983ea9e4755593a491
|
[
"Apache-2.0"
] | null | null | null |
modules/Bio/EnsEMBL/Compara/RunnableDB/GenomicAlignBlock/SetGerpNeutralRate.pm
|
manuelcarbajo/ensembl-compara
|
0ffe653215a20e6921c5f4983ea9e4755593a491
|
[
"Apache-2.0"
] | null | null | null |
modules/Bio/EnsEMBL/Compara/RunnableDB/GenomicAlignBlock/SetGerpNeutralRate.pm
|
manuelcarbajo/ensembl-compara
|
0ffe653215a20e6921c5f4983ea9e4755593a491
|
[
"Apache-2.0"
] | null | null | null |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 NAME
Bio::EnsEMBL::Compara::RunnableDB::GenomicAlignBlock::SetGerpNeutralRate
=head1 DESCRIPTION
Calculate the neutral rate of the species tree for use with those alignments
where the default depth threshold is too high to call any constrained
elements (e.g. 3-way birds).
The Runnable stores the depth threshold as the "depth_threshold"
pipeline-wide parameter. It can be overridden by setting its
'requested_depth_threshold' parameter.
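A minimal sketch of forcing a threshold from an eHive analysis definition
(everything here other than C<requested_depth_threshold> and the module name
is an assumed example):

    {   -logic_name => 'set_gerp_neutral_rate',
        -module     => 'Bio::EnsEMBL::Compara::RunnableDB::GenomicAlignBlock::SetGerpNeutralRate',
        -parameters => { 'requested_depth_threshold' => 0.35 },
    },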
=head1 APPENDIX
The rest of the documentation details each of the object methods.
Internal methods are usually preceded with a _
=cut
package Bio::EnsEMBL::Compara::RunnableDB::GenomicAlignBlock::SetGerpNeutralRate;
use strict;
use warnings;
use base ('Bio::EnsEMBL::Compara::RunnableDB::BaseRunnable');
sub param_defaults {
my ($self) = @_;
return {
%{ $self->SUPER::param_defaults },
'requested_depth_threshold' => undef,
};
}
sub fetch_input {
my( $self) = @_;
if (defined $self->param('requested_depth_threshold')) {
$self->param('computed_depth_threshold', $self->param('requested_depth_threshold'));
return;
}
my $mlss = $self->compara_dba->get_MethodLinkSpeciesSetAdaptor->fetch_by_dbID($self->param_required('mlss_id'));
if (($mlss->name =~ /(sauropsid|bird|plant|rice)/i) || ($self->dbc && ($self->dbc->dbname =~ /(sauropsid|bird|plant|rice)/i))) {
# A bit of institutional knowledge. This value was found to be
# better years ago, at a time when we only had 3 birds.
# MM: in Mar 2019, on the 34-sauropsids alignment, this threshold
# helps tag 5-20% more of the genome as conserved on 10
# species. For the other 24 species, the CEs are ~5% shorter but
# there are ~5% less of them, so no overall difference.
$self->param('computed_depth_threshold', '0.35');
return;
}
my $neutral_rate = 0;
foreach my $node ($mlss->species_tree->root->get_all_subnodes) {
$neutral_rate += $node->distance_to_parent;
}
my $default_depth_threshold = 0.5;
if ($neutral_rate < $default_depth_threshold) {
$self->param('computed_depth_threshold', $neutral_rate);
} else {
$self->param('computed_depth_threshold', undef);
}
}
sub write_output {
my ($self) = @_;
if (defined $self->param('computed_depth_threshold')) {
$self->dataflow_output_id({
'param_name' => 'depth_threshold',
'param_value' => $self->param('computed_depth_threshold'),
}, 2);
}
}
1;
| 30.938596 | 132 | 0.698611 |
ed4306c575789ede30f304999620bfeab9fdb003
| 4,296 |
pl
|
Perl
|
Dataset_Insertion_Process/0-selection_20000/0-selection_20000.pl
|
inralpgp/fishandchips
|
9bb7010e89ca383c38f986270214f450c3ef74e6
|
[
"Apache-2.0"
] | null | null | null |
Dataset_Insertion_Process/0-selection_20000/0-selection_20000.pl
|
inralpgp/fishandchips
|
9bb7010e89ca383c38f986270214f450c3ef74e6
|
[
"Apache-2.0"
] | null | null | null |
Dataset_Insertion_Process/0-selection_20000/0-selection_20000.pl
|
inralpgp/fishandchips
|
9bb7010e89ca383c38f986270214f450c3ef74e6
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/perl
# Copyright {2017} INRA (Institut National de Recherche Agronomique - FRANCE)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
my $VERSION = '0.1';
my $lastModif = '12April2017';
use strict;
use warnings;
use DBI;
use Bio::SeqIO;
use Bio::SeqFeature::Generic;
use Statistics::Descriptive;
use Data::Dumper;
use File::Basename;
use Getopt::Long;
###############################
#Custom Module
use lib '/usr/lib/perl5';
###############################
my $datasetlist= $ARGV[0];
my $directory= $ARGV[1];
GetOptions("h|help" => \&help);
&main($datasetlist,$directory);
#*********************************************************************************************
#*********************************** MAIN ******************************************
# ~/bin/dev/textMining_V5.pl -p 50 -s datasets.txt -dir ../WorkingDirectoryInsertionProcess/
#*********************************************************************************************
sub main {
my $self = {};
bless $self;
my $dataset_list = shift;
my $dir = shift;
$self->{param}->{datasets}= $dataset_list;
$self->{param}->{directory}= $dir;
my @files = () ;
open(GO, $dataset_list) or die "cannot open $dataset_list!\n" ;
while (<GO>){
if($_=~ m/^#/){
next;
}
else{
chomp;
$_ =~ /([^\t]*)\t[^\t]*\t[^\t]*.*/ ;
push(@files, "$1");
#$1 : SRP
#$2 : study
#$3 : species
}
}
close GO ;
foreach my $file (@files){
chomp$file;
my $srp=$file ;
$self->{currentSRP}=$srp;
#$gse=~s/(-A-.+)|(-GPL.+)//;
#$gse=~s/a$|b$|c$|d$//;
$self->{InfosFile} = $dir."/".$self->{currentSRP}."/Assembly_Trinity/".$self->{currentSRP}."_genes_counts.TMM.fpkm.matrix";
$self->readFileInfo();
my $wanted=20000;
my $nbTranscrits=scalar(keys(%{$self->{Final}}));
if($nbTranscrits<20000){
$wanted=$nbTranscrits;
}
my $newfile= $self->{param}->{directory}."/".$self->{currentSRP}."/".$self->{currentSRP}."/".$self->{currentSRP}."_20000genes_counts.TMM.fpkm.matrix";
open (FILE2,">$newfile");
my $m=0;
foreach my $value ( sort {$b<=> $a} keys %{$self->{Final}} ) {
if($m<$wanted){
print FILE2 $self->{Final}->{$value}."\n";
$m+=1;
}
}
close FILE2;
}
}
#*********************************************************************************************
#*********************************************************************************************
#*********************************************************************************************
###
### read file describing the samples, conditions, terms associates to data mining
###
sub readFileInfo{
my $self = shift;
#~ -e $self->{InfosFile} or -e $self->{InfosFile} or $logger->logdie("Cannot find file: ".$self->{InfosFile}."\n");
open (FILE, "<$self->{InfosFile}") or die "cannot open $self->{InfosFile}!\n";
my $n=0;
$self->{CountAllTerms}=0;
while (<FILE>){
chomp;
if ($_ eq "" || $_ =~ m/^ /){ ### do not treat empty lines as data rows (keep as header)
$self->{Header}=$_;
}
else{
my @infos=split("\t",$_);
my $gene= $infos[0];
my $nubVal=(scalar(@infos))-1;
my $moySum=0;
foreach ( my $i=1 ; $i < scalar(@infos) ; $i+=1){
$moySum+=$infos[$i];
}
my $moyenne=$moySum/$nubVal;
my $somme=0;
foreach ( my $j=1 ; $j < scalar(@infos) ; $j+=1){
$somme+=(($infos[$j]-$moyenne)*($infos[$j]-$moyenne));
}
my $ecart=sqrt($somme/$nubVal); # standard deviation over the $nubVal sample values
$self->{Final}->{$ecart}=$_;
}
}
close FILE;
return 1;
}
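# The matrix parsed above is assumed to be tab-separated, one gene per row,
# preceded by a header line; e.g. (gene name and values illustrative):
#
#   GENE0001<TAB>12.3<TAB>0.0<TAB>5.1 ...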
sub help {
my $prog = basename($0) ;
print STDERR <<EOF ;
#### $prog ####
# AUTHOR: Ambre-Aurore Josselin
# VERSION: $VERSION - $lastModif
# PURPOSE:
USAGE:
$prog [OPTIONS]
### OPTIONS ###
-v, --verbosity <integer> mode of verbosity (1-4) [default: 1]
-h, --help print this help
EOF
exit(1) ;
}
__END__
| 24.548571 | 152 | 0.506052 |
ed2b0e4e96230ffabb35079f3865b10b7de04326
| 1,294 |
pl
|
Perl
|
test2.pl
|
Stelioschatzichronis/Perl_scripting
|
6592ea9b0d7465eb56028e7fcd0bba440d1842a6
|
[
"MIT"
] | null | null | null |
test2.pl
|
Stelioschatzichronis/Perl_scripting
|
6592ea9b0d7465eb56028e7fcd0bba440d1842a6
|
[
"MIT"
] | null | null | null |
test2.pl
|
Stelioschatzichronis/Perl_scripting
|
6592ea9b0d7465eb56028e7fcd0bba440d1842a6
|
[
"MIT"
] | null | null | null |
use strict;
use warnings;
use Parallel::ForkManager;
use Data::Dumper qw(Dumper);
#my $forks = shift or die "Usage: $0 N\n";
my @numbers = (1,2,3,4,5,6,7,3.5);
my %results;
my $MAX_PROCESSES=4;
#print "Forking up to $forks at a time\n";
#my $pm = Parallel::ForkManager->new($forks);
my $pm = Parallel::ForkManager->new($MAX_PROCESSES);
$pm->run_on_finish( sub {
my ($pid, $exit_code, $ident, $exit_signal, $core_dump, $data_structure_reference) = @_;
my $q = $data_structure_reference->{input};
my $p = $data_structure_reference->{input2};
$results{$q}{$p} = $data_structure_reference->{result};
});
my $p =0;
foreach my $q (@numbers) {
my $pid = $pm->start and next;
my $res = calc($q);
$pm->finish(0, { result => $res, input => $q, input2 => $p });
}
$pm->wait_all_children;
print Dumper \%results;
print "\n";
print "\n";
#my %new_hash = %{$pm};
foreach my $counter (keys %results){
foreach my $name (keys %{$results{$counter}}){
print "{".$counter."} {".$name."} ".$results{$counter}{$name}."\n";
}
}
sub calc {
my ($n) = @_;
my $sum = 0;
for (1 .. $n) {
$sum += 3;
}
return $sum;
}
| 25.372549 | 96 | 0.530139 |
ed782a141655b331652b4c13bd3711905a6512b0
| 6,804 |
pm
|
Perl
|
auto-lib/Paws/CostExplorer/GetCostAndUsageWithResources.pm
|
shogo82148/aws-sdk-perl
|
a87555a9d30dd1415235ebacd2715b2f7e5163c7
|
[
"Apache-2.0"
] | null | null | null |
auto-lib/Paws/CostExplorer/GetCostAndUsageWithResources.pm
|
shogo82148/aws-sdk-perl
|
a87555a9d30dd1415235ebacd2715b2f7e5163c7
|
[
"Apache-2.0"
] | null | null | null |
auto-lib/Paws/CostExplorer/GetCostAndUsageWithResources.pm
|
shogo82148/aws-sdk-perl
|
a87555a9d30dd1415235ebacd2715b2f7e5163c7
|
[
"Apache-2.0"
] | null | null | null |
package Paws::CostExplorer::GetCostAndUsageWithResources;
use Moose;
has Filter => (is => 'ro', isa => 'Paws::CostExplorer::Expression');
has Granularity => (is => 'ro', isa => 'Str');
has GroupBy => (is => 'ro', isa => 'ArrayRef[Paws::CostExplorer::GroupDefinition]');
has Metrics => (is => 'ro', isa => 'ArrayRef[Str|Undef]');
has NextPageToken => (is => 'ro', isa => 'Str');
has TimePeriod => (is => 'ro', isa => 'Paws::CostExplorer::DateInterval', required => 1);
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'GetCostAndUsageWithResources');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::CostExplorer::GetCostAndUsageWithResourcesResponse');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::CostExplorer::GetCostAndUsageWithResources - Arguments for method GetCostAndUsageWithResources on L<Paws::CostExplorer>
=head1 DESCRIPTION
This class represents the parameters used for calling the method GetCostAndUsageWithResources on the
L<AWS Cost Explorer Service|Paws::CostExplorer> service. Use the attributes of this class
as arguments to method GetCostAndUsageWithResources.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to GetCostAndUsageWithResources.
=head1 SYNOPSIS
my $ce = Paws->service('CostExplorer');
my $GetCostAndUsageWithResourcesResponse =
$ce->GetCostAndUsageWithResources(
TimePeriod => {
End => 'MyYearMonthDay',
Start => 'MyYearMonthDay',
},
Filter => {
And => [ <Expression>, ... ], # OPTIONAL
CostCategories => {
Key => 'MyCostCategoryName', # min: 1, max: 255; OPTIONAL
Values => [ 'MyValue', ... ], # OPTIONAL
}, # OPTIONAL
Dimensions => {
Key => 'AZ'
, # values: AZ, INSTANCE_TYPE, LINKED_ACCOUNT, OPERATION, PURCHASE_TYPE, REGION, SERVICE, USAGE_TYPE, USAGE_TYPE_GROUP, RECORD_TYPE, OPERATING_SYSTEM, TENANCY, SCOPE, PLATFORM, SUBSCRIPTION_ID, LEGAL_ENTITY_NAME, DEPLOYMENT_OPTION, DATABASE_ENGINE, CACHE_ENGINE, INSTANCE_TYPE_FAMILY, BILLING_ENTITY, RESERVATION_ID, RESOURCE_ID, RIGHTSIZING_TYPE, SAVINGS_PLANS_TYPE, SAVINGS_PLAN_ARN, PAYMENT_OPTION; OPTIONAL
Values => [ 'MyValue', ... ], # OPTIONAL
}, # OPTIONAL
Not => <Expression>,
Or => [ <Expression>, ... ], # OPTIONAL
Tags => {
Key => 'MyTagKey', # OPTIONAL
Values => [ 'MyValue', ... ], # OPTIONAL
}, # OPTIONAL
}, # OPTIONAL
Granularity => 'DAILY', # OPTIONAL
GroupBy => [
{
Key => 'MyGroupDefinitionKey', # OPTIONAL
Type => 'DIMENSION', # values: DIMENSION, TAG, COST_CATEGORY; OPTIONAL
},
...
], # OPTIONAL
Metrics => [ 'MyMetricName', ... ], # OPTIONAL
NextPageToken => 'MyNextPageToken', # OPTIONAL
);
# Results:
my $GroupDefinitions =
$GetCostAndUsageWithResourcesResponse->GroupDefinitions;
my $NextPageToken = $GetCostAndUsageWithResourcesResponse->NextPageToken;
my $ResultsByTime = $GetCostAndUsageWithResourcesResponse->ResultsByTime;
# Returns a L<Paws::CostExplorer::GetCostAndUsageWithResourcesResponse> object.
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/ce/GetCostAndUsageWithResources>
=head1 ATTRIBUTES
=head2 Filter => L<Paws::CostExplorer::Expression>
Filters Amazon Web Services costs by different dimensions. For example,
you can specify C<SERVICE> and C<LINKED_ACCOUNT> and get the costs that
are associated with that account's usage of that service. You can nest
C<Expression> objects to define any combination of dimension filters.
For more information, see Expression
(http://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html).
The C<GetCostAndUsageWithResources> operation requires that you either
group by or filter by a C<ResourceId>.
=head2 Granularity => Str
Sets the AWS cost granularity to C<MONTHLY>, C<DAILY>, or C<HOURLY>. If
C<Granularity> isn't set, the response object doesn't include the
C<Granularity>, C<MONTHLY>, C<DAILY>, or C<HOURLY>.
Valid values are: C<"DAILY">, C<"MONTHLY">, C<"HOURLY">
=head2 GroupBy => ArrayRef[L<Paws::CostExplorer::GroupDefinition>]
You can group Amazon Web Services costs using up to two different
groups: either dimensions, tag keys, or both.
=head2 Metrics => ArrayRef[Str|Undef]
Which metrics are returned in the query. For more information about
blended and unblended rates, see Why does the "blended" annotation
appear on some line items in my bill?
(https://aws.amazon.com/premiumsupport/knowledge-center/blended-rates-intro/).
Valid values are C<AmortizedCost>, C<BlendedCost>, C<NetAmortizedCost>,
C<NetUnblendedCost>, C<NormalizedUsageAmount>, C<UnblendedCost>, and
C<UsageQuantity>.
If you return the C<UsageQuantity> metric, the service aggregates all
usage numbers without taking the units into account. For example, if
you aggregate C<usageQuantity> across all of Amazon EC2, the results
aren't meaningful because Amazon EC2 compute hours and data transfer
are measured in different units (for example, hours vs. GB). To get
more meaningful C<UsageQuantity> metrics, filter by C<UsageType> or
C<UsageTypeGroups>.
C<Metrics> is required for C<GetCostAndUsageWithResources> requests.
=head2 NextPageToken => Str
The token to retrieve the next set of results. AWS provides the token
when the response from a previous call has more results than the
maximum page size.
=head2 B<REQUIRED> TimePeriod => L<Paws::CostExplorer::DateInterval>
Sets the start and end dates for retrieving Amazon Web Services costs.
The range must be within the last 14 days (the start date cannot be
earlier than 14 days ago). The start date is inclusive, but the end
date is exclusive. For example, if C<start> is C<2017-01-01> and C<end>
is C<2017-05-01>, then the cost and usage data is retrieved from
C<2017-01-01> up to and including C<2017-04-30> but not including
C<2017-05-01>.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method GetCostAndUsageWithResources in L<Paws::CostExplorer>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 39.789474 | 420 | 0.708407 |
73d831c8894ca0271f403ebc6cbd4d750c3c65b0
| 1,301 |
pl
|
Perl
|
tools/log2dis.pl
|
kuopinghsu/simple-riscv
|
9ad046187e816b8e28e9fdbff91c2d4255672b56
|
[
"MIT"
] | 14 |
2020-06-17T05:23:30.000Z
|
2020-09-20T01:10:38.000Z
|
tools/log2dis.pl
|
kuopinghsu/simple-riscv
|
9ad046187e816b8e28e9fdbff91c2d4255672b56
|
[
"MIT"
] | 2 |
2020-09-18T09:46:36.000Z
|
2020-09-22T13:18:14.000Z
|
tools/log2dis.pl
|
kuopinghsu/simple-riscv
|
9ad046187e816b8e28e9fdbff91c2d4255672b56
|
[
"MIT"
] | 2 |
2020-09-18T08:31:26.000Z
|
2020-09-20T03:48:02.000Z
|
#!/usr/bin/perl -w
use strict;
my $VERBOSE = 1;
my $CROSS_COMPILER = defined($ENV{'CROSS_COMPILER'}) ? $ENV{'CROSS_COMPILER'} : "riscv64-unknown-elf-";
my $objdump = "${CROSS_COMPILER}objdump";
my %DIS;
$| = 1;
if ($#ARGV == 2 && $ARGV[0] eq "-q") {
$VERBOSE = 0;
shift;
}
if ($#ARGV != 1) {
print "Usage: log2dis.pl [-q] trace.log file.elf\n";
exit -1;
}
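# Example invocation (file names are illustrative):
#
#   CROSS_COMPILER=riscv64-unknown-elf- ./log2dis.pl trace.log prog.elf
#
# This reads the disassembly of prog.elf and writes trace.log.dis, annotating
# every trace line whose address is known with its instruction text.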
open(FH, "$objdump -d $ARGV[1]|") || die "can not open file $ARGV[1]";
while(<FH>) {
if (/^\s+([0-9a-fA-F]+):\s+([0-9a-fA-F]+)\s+(.+)$/) {
my $addr = sprintf "%08x", hex($1);
$DIS{$addr} = $3;
}
}
close (FH);
open(FH, "< $ARGV[0]") || die "can not open file $ARGV[0]";
open(FO, "> $ARGV[0].dis") || die "can not open file $ARGV[0].dis";
my $line = 0;
while(<FH>) {
chomp;
if (/^\s+(\d+)\s+([0-9a-fA-F]+)/) {
my $addr = sprintf "%08x", hex($2);
my $len = length($_);
my $space = " ";
if ($len < 74) {
$space = " "x(74-$len);
}
if (exists($DIS{$addr})) {
printf FO "$_%s; %s\n", $space, $DIS{$addr};
} else {
printf FO "$_\n";
}
$line++;
printf "." if ($VERBOSE == 1 && ($line % 100000) == 0);
}
}
printf "Done.\n" if ($VERBOSE == 1);
close(FH);
close(FO);
exit(0);
| 20.983871 | 103 | 0.460415 |
ed6459fbc8f11a1dd7165f07b81175602b1a3a47
| 994 |
pl
|
Perl
|
prolog/hanoi/hanoi.pl
|
rla/old-code
|
06aa69c3adef8434992410687d466dc42779e57b
|
[
"Ruby",
"MIT"
] | 2 |
2015-11-08T10:01:47.000Z
|
2020-03-10T00:00:58.000Z
|
prolog/hanoi/hanoi.pl
|
rla/old-code
|
06aa69c3adef8434992410687d466dc42779e57b
|
[
"Ruby",
"MIT"
] | null | null | null |
prolog/hanoi/hanoi.pl
|
rla/old-code
|
06aa69c3adef8434992410687d466dc42779e57b
|
[
"Ruby",
"MIT"
] | null | null | null |
% Towers of Hanoi solution
% after "The Art of Prolog"
% Raivo Laanemets, summer 2006
:-op(1200, xfy, to).
% Solution algorithm for the towers.
% Base case of the recursion: with a single disc
% we move that disc straight to the target peg,
% without using the auxiliary peg.
hanoi(1, A, B, _, [A to B]).
% Recursive step. To move N discs,
% we first move the top N-1 discs
% and then relocate the last (bottom) disc.
% A - source peg
% B - target peg
% C - auxiliary peg.
hanoi(N, A, B, C, Moves):-
N1 is N-1,
hanoi(N1, A, C, B, Ms1), % Move from the first peg to the auxiliary peg.
hanoi(N1, C, B, A, Ms2), % Move from the auxiliary peg to the second peg.
append(Ms1, [A to B|Ms2], Moves).
hanoi(N):-
hanoi(N, a, c, b, Moves),
write_moves(Moves, 0).
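% Example query; the seven moves below follow from the clauses above:
%
% ?- hanoi(3).
% a to c
% a to b
% c to b
% a to c
% b to a
% b to c
% a to c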
% Writing out the moves.
write_moves([M|Moves], N):-
write(M), nl,
N1 is N+1,
((N1 > 20) -> (wait, write_moves(Moves, 0)); write_moves(Moves, N1)).
write_moves([], _).
% Waits for a keypress from the user
% to produce "paged" output.
wait:- get_char(_).
| 22.088889 | 71 | 0.672032 |
ed4da209bf3dc28c27166840e78f41d9b94c4acc
| 8,247 |
pl
|
Perl
|
external/win_perl/lib/unicore/lib/Gc/Ll.pl
|
phixion/l0phtcrack
|
48ee2f711134e178dbedbd925640f6b3b663fbb5
|
[
"Apache-2.0",
"MIT"
] | 2 |
2021-10-20T00:25:39.000Z
|
2021-11-08T12:52:42.000Z
|
external/win_perl/lib/unicore/lib/Gc/Ll.pl
|
Brute-f0rce/l0phtcrack
|
25f681c07828e5e68e0dd788d84cc13c154aed3d
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
external/win_perl/lib/unicore/lib/Gc/Ll.pl
|
Brute-f0rce/l0phtcrack
|
25f681c07828e5e68e0dd788d84cc13c154aed3d
|
[
"Apache-2.0",
"MIT"
] | 1 |
2022-03-14T06:41:16.000Z
|
2022-03-14T06:41:16.000Z
|
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by ..\lib\unicore\mktables from the Unicode
# database, Version 9.0.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly. Use Unicode::UCD to access the Unicode character data
# base.
return <<'END';
V1266
97
123
181
182
223
247
248
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
378
379
380
381
382
385
387
388
389
390
392
393
396
398
402
403
405
406
409
412
414
415
417
418
419
420
421
422
424
425
426
428
429
430
432
433
436
437
438
439
441
443
445
448
454
455
457
458
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
497
499
500
501
502
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
570
572
573
575
577
578
579
583
584
585
586
587
588
589
590
591
660
661
688
881
882
883
884
887
888
891
894
912
913
940
975
976
978
981
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1012
1013
1014
1016
1017
1019
1021
1072
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1377
1416
5112
5118
7296
7305
7424
7468
7531
7544
7545
7579
7681
7682
7683
7684
7685
7686
7687
7688
7689
7690
7691
7692
7693
7694
7695
7696
7697
7698
7699
7700
7701
7702
7703
7704
7705
7706
7707
7708
7709
7710
7711
7712
7713
7714
7715
7716
7717
7718
7719
7720
7721
7722
7723
7724
7725
7726
7727
7728
7729
7730
7731
7732
7733
7734
7735
7736
7737
7738
7739
7740
7741
7742
7743
7744
7745
7746
7747
7748
7749
7750
7751
7752
7753
7754
7755
7756
7757
7758
7759
7760
7761
7762
7763
7764
7765
7766
7767
7768
7769
7770
7771
7772
7773
7774
7775
7776
7777
7778
7779
7780
7781
7782
7783
7784
7785
7786
7787
7788
7789
7790
7791
7792
7793
7794
7795
7796
7797
7798
7799
7800
7801
7802
7803
7804
7805
7806
7807
7808
7809
7810
7811
7812
7813
7814
7815
7816
7817
7818
7819
7820
7821
7822
7823
7824
7825
7826
7827
7828
7829
7838
7839
7840
7841
7842
7843
7844
7845
7846
7847
7848
7849
7850
7851
7852
7853
7854
7855
7856
7857
7858
7859
7860
7861
7862
7863
7864
7865
7866
7867
7868
7869
7870
7871
7872
7873
7874
7875
7876
7877
7878
7879
7880
7881
7882
7883
7884
7885
7886
7887
7888
7889
7890
7891
7892
7893
7894
7895
7896
7897
7898
7899
7900
7901
7902
7903
7904
7905
7906
7907
7908
7909
7910
7911
7912
7913
7914
7915
7916
7917
7918
7919
7920
7921
7922
7923
7924
7925
7926
7927
7928
7929
7930
7931
7932
7933
7934
7935
7944
7952
7958
7968
7976
7984
7992
8000
8006
8016
8024
8032
8040
8048
8062
8064
8072
8080
8088
8096
8104
8112
8117
8118
8120
8126
8127
8130
8133
8134
8136
8144
8148
8150
8152
8160
8168
8178
8181
8182
8184
8458
8459
8462
8464
8467
8468
8495
8496
8500
8501
8505
8506
8508
8510
8518
8522
8526
8527
8580
8581
11312
11359
11361
11362
11365
11367
11368
11369
11370
11371
11372
11373
11377
11378
11379
11381
11382
11388
11393
11394
11395
11396
11397
11398
11399
11400
11401
11402
11403
11404
11405
11406
11407
11408
11409
11410
11411
11412
11413
11414
11415
11416
11417
11418
11419
11420
11421
11422
11423
11424
11425
11426
11427
11428
11429
11430
11431
11432
11433
11434
11435
11436
11437
11438
11439
11440
11441
11442
11443
11444
11445
11446
11447
11448
11449
11450
11451
11452
11453
11454
11455
11456
11457
11458
11459
11460
11461
11462
11463
11464
11465
11466
11467
11468
11469
11470
11471
11472
11473
11474
11475
11476
11477
11478
11479
11480
11481
11482
11483
11484
11485
11486
11487
11488
11489
11490
11491
11493
11500
11501
11502
11503
11507
11508
11520
11558
11559
11560
11565
11566
42561
42562
42563
42564
42565
42566
42567
42568
42569
42570
42571
42572
42573
42574
42575
42576
42577
42578
42579
42580
42581
42582
42583
42584
42585
42586
42587
42588
42589
42590
42591
42592
42593
42594
42595
42596
42597
42598
42599
42600
42601
42602
42603
42604
42605
42606
42625
42626
42627
42628
42629
42630
42631
42632
42633
42634
42635
42636
42637
42638
42639
42640
42641
42642
42643
42644
42645
42646
42647
42648
42649
42650
42651
42652
42787
42788
42789
42790
42791
42792
42793
42794
42795
42796
42797
42798
42799
42802
42803
42804
42805
42806
42807
42808
42809
42810
42811
42812
42813
42814
42815
42816
42817
42818
42819
42820
42821
42822
42823
42824
42825
42826
42827
42828
42829
42830
42831
42832
42833
42834
42835
42836
42837
42838
42839
42840
42841
42842
42843
42844
42845
42846
42847
42848
42849
42850
42851
42852
42853
42854
42855
42856
42857
42858
42859
42860
42861
42862
42863
42864
42865
42873
42874
42875
42876
42877
42879
42880
42881
42882
42883
42884
42885
42886
42887
42888
42892
42893
42894
42895
42897
42898
42899
42902
42903
42904
42905
42906
42907
42908
42909
42910
42911
42912
42913
42914
42915
42916
42917
42918
42919
42920
42921
42922
42933
42934
42935
42936
43002
43003
43824
43867
43872
43878
43888
43968
64256
64263
64275
64280
65345
65371
66600
66640
66776
66812
68800
68851
71872
71904
119834
119860
119886
119893
119894
119912
119938
119964
119990
119994
119995
119996
119997
120004
120005
120016
120042
120068
120094
120120
120146
120172
120198
120224
120250
120276
120302
120328
120354
120380
120406
120432
120458
120486
120514
120539
120540
120546
120572
120597
120598
120604
120630
120655
120656
120662
120688
120713
120714
120720
120746
120771
120772
120778
120779
120780
125218
125252
END
| 6.437939 | 78 | 0.672123 |
ed0bf0cc40df56f51942042a4e809e81406f8b26
| 6,167 |
pm
|
Perl
|
auto-lib/Paws/SavingsPlans/DescribeSavingsPlansOfferings.pm
|
0leksii/aws-sdk-perl
|
b2132fe3c79a06fd15b6137e8a0eb628de722e0f
|
[
"Apache-2.0"
] | 164 |
2015-01-08T14:58:53.000Z
|
2022-02-20T19:16:24.000Z
|
auto-lib/Paws/SavingsPlans/DescribeSavingsPlansOfferings.pm
|
0leksii/aws-sdk-perl
|
b2132fe3c79a06fd15b6137e8a0eb628de722e0f
|
[
"Apache-2.0"
] | 348 |
2015-01-07T22:08:38.000Z
|
2022-01-27T14:34:44.000Z
|
auto-lib/Paws/SavingsPlans/DescribeSavingsPlansOfferings.pm
|
0leksii/aws-sdk-perl
|
b2132fe3c79a06fd15b6137e8a0eb628de722e0f
|
[
"Apache-2.0"
] | 87 |
2015-04-22T06:29:47.000Z
|
2021-09-29T14:45:55.000Z
|
package Paws::SavingsPlans::DescribeSavingsPlansOfferings;
use Moose;
has Currencies => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'currencies');
has Descriptions => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'descriptions');
has Durations => (is => 'ro', isa => 'ArrayRef[Int]', traits => ['NameInRequest'], request_name => 'durations');
has Filters => (is => 'ro', isa => 'ArrayRef[Paws::SavingsPlans::SavingsPlanOfferingFilterElement]', traits => ['NameInRequest'], request_name => 'filters');
has MaxResults => (is => 'ro', isa => 'Int', traits => ['NameInRequest'], request_name => 'maxResults');
has NextToken => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'nextToken');
has OfferingIds => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'offeringIds');
has Operations => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'operations');
has PaymentOptions => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'paymentOptions');
has PlanTypes => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'planTypes');
has ProductType => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'productType');
has ServiceCodes => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'serviceCodes');
has UsageTypes => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'usageTypes');
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'DescribeSavingsPlansOfferings');
class_has _api_uri => (isa => 'Str', is => 'ro', default => '/DescribeSavingsPlansOfferings');
class_has _api_method => (isa => 'Str', is => 'ro', default => 'POST');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::SavingsPlans::DescribeSavingsPlansOfferingsResponse');
1;
### main pod documentation begin ###
=head1 NAME
Paws::SavingsPlans::DescribeSavingsPlansOfferings - Arguments for method DescribeSavingsPlansOfferings on L<Paws::SavingsPlans>
=head1 DESCRIPTION
This class represents the parameters used for calling the method DescribeSavingsPlansOfferings on the
L<AWS Savings Plans|Paws::SavingsPlans> service. Use the attributes of this class
as arguments to method DescribeSavingsPlansOfferings.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DescribeSavingsPlansOfferings.
=head1 SYNOPSIS
my $savingsplans = Paws->service('SavingsPlans');
my $DescribeSavingsPlansOfferingsResponse =
$savingsplans->DescribeSavingsPlansOfferings(
Currencies => [
'CNY', ... # values: CNY, USD
], # OPTIONAL
Descriptions => [ 'MySavingsPlanDescription', ... ], # OPTIONAL
Durations => [ 1, ... ], # OPTIONAL
Filters => [
{
Name => 'region', # values: region, instanceFamily; OPTIONAL
Values => [ 'MyJsonSafeFilterValueString', ... ], # OPTIONAL
},
...
], # OPTIONAL
MaxResults => 1, # OPTIONAL
NextToken => 'MyPaginationToken', # OPTIONAL
OfferingIds => [ 'MyUUID', ... ], # OPTIONAL
Operations => [
'MySavingsPlanOperation', ... # max: 255
], # OPTIONAL
PaymentOptions => [
'All Upfront', ... # values: All Upfront, Partial Upfront, No Upfront
], # OPTIONAL
PlanTypes => [
'Compute', ... # values: Compute, EC2Instance, SageMaker
], # OPTIONAL
ProductType => 'EC2', # OPTIONAL
ServiceCodes => [
'MySavingsPlanServiceCode', ... # max: 255
], # OPTIONAL
UsageTypes => [
'MySavingsPlanUsageType', ... # max: 255
], # OPTIONAL
);
# Results:
my $NextToken = $DescribeSavingsPlansOfferingsResponse->NextToken;
my $SearchResults = $DescribeSavingsPlansOfferingsResponse->SearchResults;
# Returns a L<Paws::SavingsPlans::DescribeSavingsPlansOfferingsResponse> object.
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/savingsplans/DescribeSavingsPlansOfferings>
=head1 ATTRIBUTES
=head2 Currencies => ArrayRef[Str|Undef]
The currencies.
=head2 Descriptions => ArrayRef[Str|Undef]
The descriptions.
=head2 Durations => ArrayRef[Int]
The durations, in seconds.
=head2 Filters => ArrayRef[L<Paws::SavingsPlans::SavingsPlanOfferingFilterElement>]
The filters.
=head2 MaxResults => Int
The maximum number of results to return with a single call. To retrieve
additional results, make another call with the returned token value.
=head2 NextToken => Str
The token for the next page of results.
=head2 OfferingIds => ArrayRef[Str|Undef]
The IDs of the offerings.
=head2 Operations => ArrayRef[Str|Undef]
The specific AWS operation for the line item in the billing report.
=head2 PaymentOptions => ArrayRef[Str|Undef]
The payment options.
=head2 PlanTypes => ArrayRef[Str|Undef]
The plan type.
=head2 ProductType => Str
The product type.
Valid values are: C<"EC2">, C<"Fargate">, C<"Lambda">, C<"SageMaker">
=head2 ServiceCodes => ArrayRef[Str|Undef]
The services.
=head2 UsageTypes => ArrayRef[Str|Undef]
The usage details of the line item in the billing report.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DescribeSavingsPlansOfferings in L<Paws::SavingsPlans>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 33.884615 | 249 | 0.665315 |
ed1e6c81f2b1030a87685450a80eaa5c7ef73483
| 977 |
pm
|
Perl
|
__module__.pm
|
geostarling/rex-gentoo-kernel
|
c3d58ada9b98712014484d9c88b988a6c39e35a8
|
[
"MIT"
] | null | null | null |
__module__.pm
|
geostarling/rex-gentoo-kernel
|
c3d58ada9b98712014484d9c88b988a6c39e35a8
|
[
"MIT"
] | null | null | null |
__module__.pm
|
geostarling/rex-gentoo-kernel
|
c3d58ada9b98712014484d9c88b988a6c39e35a8
|
[
"MIT"
] | null | null | null |
package Rex::Gentoo::Kernel;
use Rex -base;
use Rex::Template::TT;
use Term::ANSIColor;
desc 'Compile and install Gentoo kernel';
task 'setup', sub {
pkg "gentoo-sources",
ensure => "present";
file "/usr/src/linux/.config",
content => template("templates/usr/src/linux/config.tt"),
on_change => sub {
run "make", cwd => "/usr/src/linux", auto_die => TRUE;
run "make modules_install", cwd => "/usr/src/linux", auto_die => TRUE;
run "make install", cwd => "/usr/src/linux", auto_die => TRUE;
};
};
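# Hedged example of invoking the task (the host name and the task-path syntax
# are assumptions based on common Rex usage):
#
#   rex -H buildhost Rex:Gentoo:Kernel:setup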
task 'upgrade', sub {
# TODO
};
1;
=pod
=head1 NAME
$::module_name - {{ SHORT DESCRIPTION }}
=head1 DESCRIPTION
{{ LONG DESCRIPTION }}
=head1 USAGE
{{ USAGE DESCRIPTION }}
include qw/Rex::Gentoo::Install/;
task yourtask => sub {
Rex::Gentoo::Install::example();
};
=head1 TASKS
=over 4
=item example
This is an example Task. This task just output's the uptime of the system.
=back
=cut
| 16.016393 | 76 | 0.618219 |
ed48c095153e80ae9c7fdc758a8ca9411979c97a
| 5,754 |
t
|
Perl
|
S15-nfg/GraphemeBreakTest.t
|
lathropd/roast
|
85a3ffb9eda173337ea98bb857327916237ce87c
|
[
"Artistic-2.0"
] | null | null | null |
S15-nfg/GraphemeBreakTest.t
|
lathropd/roast
|
85a3ffb9eda173337ea98bb857327916237ce87c
|
[
"Artistic-2.0"
] | null | null | null |
S15-nfg/GraphemeBreakTest.t
|
lathropd/roast
|
85a3ffb9eda173337ea98bb857327916237ce87c
|
[
"Artistic-2.0"
] | null | null | null |
## WHEN UPDATING UNICODE VERSION ALSO UPDATE docs/unicode-generated-tests.asciidoc
use v6;
my IO::Path $repo-dir = $?FILE.IO.parent(2).add("3rdparty/Unicode/11.0.0/ucd/auxiliary/GraphemeBreakTest.txt");
my IO::Path $rakudo-subdir = $?FILE.IO.parent(2);
my IO::Path $rakudo-dir = $rakudo-subdir.child($repo-dir);
my Str:D $location = $rakudo-dir.e ?? $rakudo-dir.Str !! $repo-dir.Str;
our $DEBUG;
use Test;
=begin pod
=NAME Unicode GraphemeBreakTest
=DESCRIPTION
Unicode Data files in 3rdparty/Unicode/ and the snippet of commented code below
are under SPDX-License-Identifier: Unicode-DFS-2016
See 3rdparty/Unicode/LICENSE for full text of license.
From GraphemeBreakTest.txt Unicode 9.0
=USAGE
If you run the script with --only=900,888 it will run only the line numbers
supplied as a commas seperated list of line numbers. Using --debug will give
additional debug info. Can supply datafile manually with --file=filename.txt
but that should not be required.
# Default Grapheme Break Test
#
# Format:
# <string> (# <comment>)?
# <string> contains hex Unicode code points, with
# ÷ wherever there is a break opportunity, and
# × wherever there is not.
# <comment> the format can change, but currently it shows:
# - the sample character name
# - (x) the Grapheme_Cluster_Break property value for the sample character
# - [x] the rule that determines whether there is a break or not
=head1 HOW TO FUDGE
=para The keys of the hash below are line numbers of the Unicode test document.
Values are either set to ALL or set to one or more of C,0,1,2,3,4..
=para B<Example>:
=item3 C<not ok 2384 - Line 835: grapheme [1] has correct codepoints>
=para You can add 835 => ['1'] to the hash and it will fudge that line for you
=end pod
constant %fudged-tests = {
694 => ['ALL'],
695 => ['ALL'],
591 => ['ALL'],
};
constant @lines-with-normalization = (
441 => [0, ],
674 => [ 0, ],
678 => [ 0, ],
679 => [ 0, ],
686 => [ 0, ],
);
sub MAIN (Str:D :$file = $location, Str :$only, Bool:D :$debug = False) {
$DEBUG = $debug;
note "WHEN UPDATING UNICODE VERSION ALSO UPDATE docs/unicode-generated-tests.asciidoc";
my @only = $only ?? $only.split([',', ' ']) !! Empty;
die "Can't find file at ", $file.IO.absolute unless $file.IO.f;
note "Reading file ", $file.IO.absolute;
my @fail;
plan (1943);
for $file.IO.lines -> $line {
process-line $line, @fail, :@only;
}
my $bag = @fail.Bag;
note "Grapheme_Cluster_Break test: Failed {$bag.elems} lines: ", $bag;
}
grammar GraphemeBreakTest {
token TOP { [<.ws> [<break> | <nobreak>] <.ws>]+ % <hex> <comment> }
token hex { <:AHex>+ }
token break { '÷' }
token nobreak { '×' }
token comment { '#' .* $ }
}
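# A data line of the form "÷ 0020 × 0308 ÷ 0020 ÷" (per the format described
# in the POD above) should parse to ord-array [[0x20, 0x308], [0x20]]:
# × joins codepoints into the current grapheme, ÷ starts the next one.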
class parser {
has @!ord-array;
method TOP ($/) {
my @list = $/.caps;
my @stack;
my @results;
note $/ if $DEBUG;
sub move-from-stack {
if @stack {
@results[@results.elems].append: @stack;
@stack = [];
}
}
for @list {
if .key eq 'nobreak' {
say 'nobreak' if $DEBUG;
}
elsif .key eq 'break' {
note 'break' if $DEBUG;
move-from-stack;
}
elsif .key eq 'hex' {
@stack.push: :16(~.value);
}
}
my $string = @results».List.flat.chrs;
move-from-stack;
note @results.raku if $DEBUG;
make {
string => $string,
ord-array => @results
}
}
}
sub process-line (Str:D $line, @fail, :@only!) {
state $line-no = 0;
$line-no++;
return if @only and $line-no ne @only.any;
return if $line.starts-with('#');
my Bool:D $fudge-b = %fudged-tests{$line-no}:exists ?? True !! False;
note 'LINE: [' ~ $line ~ ']' if $DEBUG;
my $list = GraphemeBreakTest.parse(
$line,
actions => parser
).made;
die "line $line-no undefined parse" if $list.defined.not;
if $fudge-b {
if %fudged-tests{$line-no}.any eq 'ALL' {
todo("line $line-no todo for {%fudged-tests{$line-no}.Str} tests", 1 + $list<ord-array>.elems);
$fudge-b = False; # We already have todo'd don't attempt again
}
elsif %fudged-tests{$line-no}.any eq 'C' {
todo("[C] num of chars line $line-no", 1);
}
}
is-deeply $list<ord-array>.elems, $list<string>.chars, "Line $line-no: [C] right num of chars | {$list<string>.uninames.raku}" or @fail.push($line-no);
for ^$list<ord-array>.elems -> $elem {
if $fudge-b and %fudged-tests{$line-no}.any eq $elem {
todo "[$elem] grapheme line $line-no todo";
}
my Array $expected;
{
$expected = $list<ord-array>[$elem].flat.Array;
if $line-no eq @lines-with-normalization».key.any {
my $pair = @lines-with-normalization.first({.key eq $line-no});
if $pair.value.any eqv $elem {
$expected = $expected.chrs.ords.flat.Array;
}
}
if $expected.chrs.ords.Array !eqv $expected {
die "codepoints change under normalization. manually check and add an exception or fix the script\n" ~ "
line no $line-no: elem $elem. Got: ", $expected.chrs.ords.Array.join(', '), ' from: ', $expected.join(',');
}
}
is-deeply $list<string>.substr($elem, 1).ords.flat.Array, $expected, "Line $line-no: grapheme [$elem] has correct codepoints" or @fail.push($line-no);
}
}
# vim: expandtab shiftwidth=4
| 35.73913 | 158 | 0.573862 |
73f410002bd5b4b6df875b167a8de0e109a3d9a4
| 17,250 |
pm
|
Perl
|
tests/tap/perl/Test/RRA/Automake.pm
|
ktdreyer/remctl
|
cd89bf24c2d4a0a7bb42ad70b404ad20b02d3048
|
[
"TCL",
"Ruby",
"Unlicense",
"MIT"
] | null | null | null |
tests/tap/perl/Test/RRA/Automake.pm
|
ktdreyer/remctl
|
cd89bf24c2d4a0a7bb42ad70b404ad20b02d3048
|
[
"TCL",
"Ruby",
"Unlicense",
"MIT"
] | null | null | null |
tests/tap/perl/Test/RRA/Automake.pm
|
ktdreyer/remctl
|
cd89bf24c2d4a0a7bb42ad70b404ad20b02d3048
|
[
"TCL",
"Ruby",
"Unlicense",
"MIT"
] | null | null | null |
# Helper functions for Perl test programs in Automake distributions.
#
# This module provides a collection of helper functions used by test programs
# written in Perl and included in C source distributions that use Automake.
# They embed knowledge of how I lay out my source trees and test suites with
# Autoconf and Automake. They may be usable by others, but doing so will
# require closely following the conventions implemented by the rra-c-util
# utility collection.
#
# All the functions here assume that C_TAP_BUILD and C_TAP_SOURCE are set in
# the environment. This is normally done via the C TAP Harness runtests
# wrapper.
#
# SPDX-License-Identifier: MIT
package Test::RRA::Automake;
use 5.006;
use strict;
use warnings;
# For Perl 5.006 compatibility.
## no critic (ClassHierarchies::ProhibitExplicitISA)
use Exporter;
use File::Find qw(find);
use File::Spec;
use Test::More;
use Test::RRA::Config qw($LIBRARY_PATH);
# Used below for use lib calls.
my ($PERL_BLIB_ARCH, $PERL_BLIB_LIB);
# Determine the path to the build tree of any embedded Perl module package in
# this source package. We do this in a BEGIN block because we're going to use
# the results in a use lib command below.
BEGIN {
$PERL_BLIB_ARCH = File::Spec->catdir(qw(perl blib arch));
$PERL_BLIB_LIB = File::Spec->catdir(qw(perl blib lib));
# If C_TAP_BUILD is set, we can come up with better values.
if (defined($ENV{C_TAP_BUILD})) {
my ($vol, $dirs) = File::Spec->splitpath($ENV{C_TAP_BUILD}, 1);
my @dirs = File::Spec->splitdir($dirs);
pop(@dirs);
$PERL_BLIB_ARCH = File::Spec->catdir(@dirs, qw(perl blib arch));
$PERL_BLIB_LIB = File::Spec->catdir(@dirs, qw(perl blib lib));
}
}
# Prefer the modules built as part of our source package. Otherwise, we may
# not find Perl modules while testing, or find the wrong versions.
use lib $PERL_BLIB_ARCH;
use lib $PERL_BLIB_LIB;
# Declare variables that should be set in BEGIN for robustness.
our (@EXPORT_OK, @ISA, $VERSION);
# Set $VERSION and everything export-related in a BEGIN block for robustness
# against circular module loading (not that we load any modules, but
# consistency is good).
BEGIN {
@ISA = qw(Exporter);
@EXPORT_OK = qw(
all_files automake_setup perl_dirs test_file_path test_tmpdir
);
# This version should match the corresponding rra-c-util release, but with
# two digits for the minor version, including a leading zero if necessary,
# so that it will sort properly.
$VERSION = '7.01';
}
# Directories to skip globally when looking for all files, or for directories
# that could contain Perl files.
my @GLOBAL_SKIP = qw(.git _build autom4te.cache build-aux);
# Additional paths to skip when building a list of all files in the
# distribution. This primarily skips build artifacts that aren't interesting
# to any of the tests. These match any path component.
my @FILES_SKIP = qw(
.deps .dirstamp .libs aclocal.m4 config.h config.h.in config.h.in~ config.log
config.status configure
);
# The temporary directory created by test_tmpdir, if any. If this is set,
# attempt to remove the directory stored here on program exit (but ignore
# failure to do so).
my $TMPDIR;
# Returns a list of all files in the distribution.
#
# Returns: List of files
sub all_files {
my @files;
# Turn the skip lists into hashes for ease of querying.
my %skip = map { $_ => 1 } @GLOBAL_SKIP;
my %files_skip = map { $_ => 1 } @FILES_SKIP;
# Wanted function for find. Prune anything matching either of the skip
# lists, or *.lo files, and then add all regular files to the list.
my $wanted = sub {
my $file = $_;
my $path = $File::Find::name;
$path =~ s{ \A [.]/ }{}xms;
if ($skip{$path} or $files_skip{$file} or $file =~ m{ [.] lo \z }xms) {
$File::Find::prune = 1;
return;
}
if (-f $file) {
push(@files, $path);
}
};
# Do the recursive search and return the results.
find($wanted, q{.});
return @files;
}
# Perform initial test setup for running a Perl test in an Automake package.
# This verifies that C_TAP_BUILD and C_TAP_SOURCE are set and then changes
# directory to the C_TAP_SOURCE directory by default. Sets LD_LIBRARY_PATH if
# the $LIBRARY_PATH configuration option is set. Calls BAIL_OUT if
# C_TAP_BUILD or C_TAP_SOURCE are missing or if anything else fails.
#
# $args_ref - Reference to a hash of arguments to configure behavior:
# chdir_build - If set to a true value, changes to C_TAP_BUILD instead of
# C_TAP_SOURCE
#
# Returns: undef
sub automake_setup {
my ($args_ref) = @_;
# Bail if C_TAP_BUILD or C_TAP_SOURCE are not set.
if (!$ENV{C_TAP_BUILD}) {
BAIL_OUT('C_TAP_BUILD not defined (run under runtests)');
}
if (!$ENV{C_TAP_SOURCE}) {
BAIL_OUT('C_TAP_SOURCE not defined (run under runtests)');
}
# C_TAP_BUILD or C_TAP_SOURCE will be the test directory. Change to the
# parent.
my $start;
if ($args_ref->{chdir_build}) {
$start = $ENV{C_TAP_BUILD};
} else {
$start = $ENV{C_TAP_SOURCE};
}
my ($vol, $dirs) = File::Spec->splitpath($start, 1);
my @dirs = File::Spec->splitdir($dirs);
pop(@dirs);
# Simplify relative paths at the end of the directory.
my $ups = 0;
my $i = $#dirs;
while ($i > 2 && $dirs[$i] eq File::Spec->updir) {
$ups++;
$i--;
}
for (1 .. $ups) {
pop(@dirs);
pop(@dirs);
}
my $root = File::Spec->catpath($vol, File::Spec->catdir(@dirs), q{});
chdir($root) or BAIL_OUT("cannot chdir to $root: $!");
# If C_TAP_BUILD is a subdirectory of C_TAP_SOURCE, add it to the global
# ignore list.
my ($buildvol, $builddirs) = File::Spec->splitpath($ENV{C_TAP_BUILD}, 1);
my @builddirs = File::Spec->splitdir($builddirs);
pop(@builddirs);
if ($buildvol eq $vol && @builddirs == @dirs + 1) {
while (@dirs && $builddirs[0] eq $dirs[0]) {
shift(@builddirs);
shift(@dirs);
}
if (@builddirs == 1) {
push(@GLOBAL_SKIP, $builddirs[0]);
}
}
# Set LD_LIBRARY_PATH if the $LIBRARY_PATH configuration option is set.
## no critic (Variables::RequireLocalizedPunctuationVars)
if (defined($LIBRARY_PATH)) {
@builddirs = File::Spec->splitdir($builddirs);
pop(@builddirs);
my $libdir = File::Spec->catdir(@builddirs, $LIBRARY_PATH);
my $path = File::Spec->catpath($buildvol, $libdir, q{});
if (-d "$path/.libs") {
$path .= '/.libs';
}
if ($ENV{LD_LIBRARY_PATH}) {
$ENV{LD_LIBRARY_PATH} .= ":$path";
} else {
$ENV{LD_LIBRARY_PATH} = $path;
}
}
return;
}
# Returns a list of directories that may contain Perl scripts and that should
# be passed to Perl test infrastructure that expects a list of directories to
# recursively check. The list will be all eligible top-level directories in
# the package except for the tests directory, which is broken out to one
# additional level. Calls BAIL_OUT on any problems.
#
# $args_ref - Reference to a hash of arguments to configure behavior:
# skip - A reference to an array of directories to skip
#
# Returns: List of directories possibly containing Perl scripts to test
sub perl_dirs {
my ($args_ref) = @_;
# Add the global skip list. We also ignore the perl directory if it
# exists since, in my packages, it is treated as a Perl module
# distribution and has its own standalone test suite.
my @skip = $args_ref->{skip} ? @{ $args_ref->{skip} } : ();
push(@skip, @GLOBAL_SKIP, 'perl');
# Separate directories to skip under tests from top-level directories.
my @skip_tests = grep { m{ \A tests/ }xms } @skip;
@skip = grep { !m{ \A tests }xms } @skip;
for my $skip_dir (@skip_tests) {
$skip_dir =~ s{ \A tests/ }{}xms;
}
# Convert the skip lists into hashes for convenience.
my %skip = map { $_ => 1 } @skip, 'tests';
my %skip_tests = map { $_ => 1 } @skip_tests;
# Build the list of top-level directories to test.
opendir(my $rootdir, q{.}) or BAIL_OUT("cannot open .: $!");
my @dirs = grep { -d && !$skip{$_} } readdir($rootdir);
closedir($rootdir);
@dirs = File::Spec->no_upwards(@dirs);
# Add the list of subdirectories of the tests directory.
if (-d 'tests') {
opendir(my $testsdir, q{tests}) or BAIL_OUT("cannot open tests: $!");
# Skip if found in %skip_tests or if not a directory.
my $is_skipped = sub {
my ($dir) = @_;
return 1 if $skip_tests{$dir};
$dir = File::Spec->catdir('tests', $dir);
return -d $dir ? 0 : 1;
};
# Build the filtered list of subdirectories of tests.
my @test_dirs = grep { !$is_skipped->($_) } readdir($testsdir);
closedir($testsdir);
@test_dirs = File::Spec->no_upwards(@test_dirs);
# Add the tests directory to the start of the directory name.
push(@dirs, map { File::Spec->catdir('tests', $_) } @test_dirs);
}
return @dirs;
}
# Find a configuration file for the test suite. Searches relative to
# C_TAP_BUILD first and then C_TAP_SOURCE and returns whichever is found
# first. Calls BAIL_OUT if the file could not be found.
#
# $file - Partial path to the file
#
# Returns: Full path to the file
sub test_file_path {
my ($file) = @_;
BASE:
for my $base ($ENV{C_TAP_BUILD}, $ENV{C_TAP_SOURCE}) {
next if !defined($base);
if (-f "$base/$file") {
return "$base/$file";
}
}
BAIL_OUT("cannot find $file");
return;
}
# Create a temporary directory for tests to use for transient files and return
# the path to that directory. The directory is automatically removed on
# program exit. The directory permissions use the current umask. Calls
# BAIL_OUT if the directory could not be created.
#
# Returns: Path to a writable temporary directory
sub test_tmpdir {
my $path;
# If we already figured out what directory to use, reuse the same path.
# Otherwise, create a directory relative to C_TAP_BUILD if set.
if (defined($TMPDIR)) {
$path = $TMPDIR;
} else {
my $base;
if (defined($ENV{C_TAP_BUILD})) {
$base = $ENV{C_TAP_BUILD};
} else {
$base = File::Spec->curdir;
}
$path = File::Spec->catdir($base, 'tmp');
}
# Create the directory if it doesn't exist.
if (!-d $path) {
if (!mkdir($path, 0777)) {
BAIL_OUT("cannot create directory $path: $!");
}
}
# Store the directory name for cleanup and return it.
$TMPDIR = $path;
return $path;
}
# On program exit, remove $TMPDIR if set and if possible. Report errors with
# diag but otherwise ignore them.
END {
if (defined($TMPDIR) && -d $TMPDIR) {
local $! = undef;
if (!rmdir($TMPDIR)) {
diag("cannot remove temporary directory $TMPDIR: $!");
}
}
}
1;
__END__
=for stopwords
Allbery Automake Automake-aware Automake-based rra-c-util ARGS subdirectories
sublicense MERCHANTABILITY NONINFRINGEMENT umask
=head1 NAME
Test::RRA::Automake - Automake-aware support functions for Perl tests
=head1 SYNOPSIS
use Test::RRA::Automake qw(automake_setup perl_dirs test_file_path);
automake_setup({ chdir_build => 1 });
# Paths to directories that may contain Perl scripts.
my @dirs = perl_dirs({ skip => [qw(lib)] });
# Configuration for Kerberos tests.
my $keytab = test_file_path('config/keytab');
=head1 DESCRIPTION
This module collects utility functions that are useful for test scripts
written in Perl and included in a C Automake-based package. They assume the
layout of a package that uses rra-c-util and C TAP Harness for the test
structure.
Loading this module will also add the directories C<perl/blib/arch> and
C<perl/blib/lib> to the Perl library search path, relative to C_TAP_BUILD if
that environment variable is set. This is harmless for C Automake projects
that don't contain an embedded Perl module, and for those projects that do,
this will allow subsequent C<use> calls to find modules that are built as part
of the package build process.
The automake_setup() function should be called before calling any other
functions provided by this module.
=head1 FUNCTIONS
None of these functions are imported by default. The ones used by a script
should be explicitly imported. On failure, all of these functions call
BAIL_OUT (from Test::More).
=over 4
=item all_files()
Returns a list of all "interesting" files in the distribution that a test
suite may want to look at. This excludes various products of the build system,
the build directory if it's under the source directory, and a few other
uninteresting directories like F<.git>. The returned paths will be paths
relative to the root of the package.
=item automake_setup([ARGS])
Verifies that the C_TAP_BUILD and C_TAP_SOURCE environment variables are set
and then changes directory to the top of the source tree (which is one
directory up from the C_TAP_SOURCE path, since C_TAP_SOURCE points to the top
of the tests directory).
If ARGS is given, it should be a reference to a hash of configuration options.
Only one option is supported: C<chdir_build>. If it is set to a true value,
automake_setup() changes directories to the top of the build tree instead.
=item perl_dirs([ARGS])
Returns a list of directories that may contain Perl scripts that should be
tested by test scripts that test all Perl in the source tree (such as syntax
or coding style checks). The paths will be simple directory names relative to
the current directory or two-part directory names under the F<tests>
directory. (Directories under F<tests> are broken out separately since it's
common to want to apply different policies to different subdirectories of
F<tests>.)
If ARGS is given, it should be a reference to a hash of configuration options.
Only one option is supported: C<skip>, whose value should be a reference to an
array of additional top-level directories or directories starting with
C<tests/> that should be skipped.
=item test_file_path(FILE)
Given FILE, which should be a relative path, locates that file relative to the
test directory in either the source or build tree. FILE will be checked for
relative to the environment variable C_TAP_BUILD first, and then relative to
C_TAP_SOURCE. test_file_path() returns the full path to FILE or calls
BAIL_OUT if FILE could not be found.
=item test_tmpdir()
Create a temporary directory for tests to use for transient files and return
the path to that directory. The directory is created relative to the
C_TAP_BUILD environment variable, which must be set. Permissions on the
directory are set using the current umask. test_tmpdir() returns the full
path to the temporary directory or calls BAIL_OUT if it could not be created.
The directory is automatically removed if possible on program exit. Failure
to remove the directory on exit is reported with diag() and otherwise ignored.
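A typical use is staging transient output (a sketch):

    my $tmpdir = test_tmpdir();
    open(my $fh, '>', "$tmpdir/output")
        or BAIL_OUT("cannot create $tmpdir/output: $!");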
=back
=head1 ENVIRONMENT
=over 4
=item C_TAP_BUILD
The root of the tests directory in Automake build directory for this package,
used to find files as documented above.
=item C_TAP_SOURCE
The root of the tests directory in the source tree for this package, used to
find files as documented above.
=back
=head1 AUTHOR
Russ Allbery <[email protected]>
=head1 COPYRIGHT AND LICENSE
Copyright 2014, 2015, 2018 Russ Allbery <[email protected]>
Copyright 2013 The Board of Trustees of the Leland Stanford Junior University
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=head1 SEE ALSO
Test::More(3), Test::RRA(3), Test::RRA::Config(3)
This module is maintained in the rra-c-util package. The current version is
available from L<https://www.eyrie.org/~eagle/software/rra-c-util/>.
The C TAP Harness test driver and libraries for TAP-based C testing are
available from L<https://www.eyrie.org/~eagle/software/c-tap-harness/>.
=cut
# Local Variables:
# copyright-at-end-flag: t
# End:
| 35.276074 | 79 | 0.690319 |
ed4959b8a13a335a6284d6378fc9b058f3f59776 | 1,317 | pl | Perl | main/solenv/bin/getrevision.pl | ackza/openoffice | d49dfe9c625750e261c7ed8d6ccac8d361bf3418 | ["Apache-2.0"] | 1 | 2019-12-27T19:25:34.000Z | 2019-12-27T19:25:34.000Z | main/solenv/bin/getrevision.pl | ackza/openoffice | d49dfe9c625750e261c7ed8d6ccac8d361bf3418 | ["Apache-2.0"] | null | null | null | main/solenv/bin/getrevision.pl | ackza/openoffice | d49dfe9c625750e261c7ed8d6ccac8d361bf3418 | ["Apache-2.0"] | null | null | null |
#**************************************************************
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#**************************************************************
#### module lookup
use File::Basename;
my @lib_dirs;
BEGIN {
if ( !defined($ENV{SOLARENV}) ) {
die "No environment found (environment variable SOLARENV is undefined)";
}
push(@lib_dirs, "$ENV{SOLARENV}/bin/modules");
}
use lib (@lib_dirs);
use SvnRevision;
my $scm_root_dir=dirname($ENV{SRC_ROOT});
print SvnRevision::DetectRevisionId("$scm_root_dir");
| 33.769231 | 80 | 0.656796 |
ed50dddfcf4ab763b689fb55f3e4ee7a3c779e47 | 3,225 | pm | Perl | lib/Krawfish/Util/Buffer.pm | KorAP/Krawfish-prototype | 1ec0229d15c23c5ca2e1734425d5fdd5212a1e30 | ["BSD-2-Clause"] | null | null | null | lib/Krawfish/Util/Buffer.pm | KorAP/Krawfish-prototype | 1ec0229d15c23c5ca2e1734425d5fdd5212a1e30 | ["BSD-2-Clause"] | null | null | null | lib/Krawfish/Util/Buffer.pm | KorAP/Krawfish-prototype | 1ec0229d15c23c5ca2e1734425d5fdd5212a1e30 | ["BSD-2-Clause"] | null | null | null |
package Krawfish::Util::Buffer;
use Krawfish::Log;
use Carp qw/carp/;
use bytes;
use strict;
use warnings;
# A buffer contains a queue of spans, with a finger pointing
# at a certain position in the queue
# TODO:
# See for an example implementation listpack.c (redis):
# - https://gist.github.com/antirez/66ffab20190ece8a7485bd9accfbc175
# TODO:
# Make this a buffered query, so it can have a simplified API
# for usage, with next() that may either read from the buffer
# or from the nested stream.
#
# It would probably need
# ->rewind
# ->forget
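#
# Example usage (a sketch):
#
#   my $buffer = Krawfish::Util::Buffer->new;
#   $buffer->remember($_) foreach qw(a b c);
#   $buffer->current;  # 'a' - the finger starts at position 0
#   $buffer->next;     # finger moves to 'b'
#   $buffer->rewind;   # finger back to 'a'
#   $buffer->forget;   # drop 'a' from the front and adjust the finger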
use constant DEBUG => 0;
# Constructor
sub new {
bless {
finger => 0,
array => []
}, shift;
};
# Go to the next element of the buffer
sub next {
my $self = shift;
print_log('buffer', "Try to forward buffer finger: " . $self->to_string) if DEBUG;
# print_log('buffer', "Finger: " . $self->finger . ' of ' . $self->size) if DEBUG;
$self->{finger}++;
if ($self->{finger} >= $self->size) {
print_log('buffer', 'Finger is already at the end of the buffer') if DEBUG;
return;
};
print_log('buffer', 'Forward buffer finger: ' . $self->to_string) if DEBUG;
return 1;
};
# Return the current element of the buffer
sub current {
my $self = shift;
return if $self->{finger} >= $self->size;
return $self->{array}->[$self->{finger}];
};
# Return the current position of the finger
# Or set the finger
sub finger {
if (defined $_[1]) {
$_[0]->{finger} = $_[1];
print_log('buffer', "Set finger to $_[1]: " . $_[0]->to_string) if DEBUG;
}
$_[0]->{finger};
};
#sub forward {
# $_[0]->{finger}++;
# print_log('buffer', 'Move finger forward') if DEBUG;
#};
sub backward {
$_[0]->{finger}--;
print_log('buffer', 'Move finger backwards') if DEBUG;
};
# Remember item
sub remember {
my $self = shift;
my $span = shift;
print_log('buffer', "Remember $span in buffer: " . $self->to_string) if DEBUG;
push @{$self->{array}}, $span;
return 1;
};
sub first {
$_[0]->{array}->[0];
};
# Reset finger to start position
sub rewind {
$_[0]->{finger} = 0;
print_log('buffer', 'Reset buffer finger: ' . $_[0]->to_string) if DEBUG;
};
# Position finger to last element
sub to_end {
my $self = shift;
$self->{finger} = $self->size - 1;
};
# Check size
sub size {
return scalar @{$_[0]->{array}};
};
# Forget first element and reposition finger
sub forget {
my $span = shift(@{$_[0]->{array}});
unless ($span) {
carp 'Nothing to forget';
return;
};
print_log('buffer', "Forget span $span: " . $_[0]->to_string) if DEBUG;
# decrement finger
$_[0]->{finger}--;
print_log('buffer', "Buffer is now " . $_[0]->to_string) if DEBUG;
return 1;
};
# Clear buffer
sub clear {
print_log('buffer', 'Clear buffer list') if DEBUG;
$_[0]->{array} = [];
$_[0]->{finger} = 0;
};
# Stringify buffer content
sub to_string {
my $self = shift;
my $string = '';
my $finger = $self->{finger};
foreach (0 .. $finger-1) {
$string .= ($self->{array}->[$_] // '');
};
$string .= ' <';
$string .= $self->{array}->[$finger] // '';
$string .= '> ';
foreach ($finger + 1 .. ($self->size - 1)) {
$string .= ($self->{array}->[$_] // '');
};
return $string;
};
1;
| 19.427711 | 84 | 0.597829 |
ed6379703a3c965fd33bec289225718e7a41b882 | 1,574 | t | Perl | project-euler/215/t/to_from_id.t | zoffixznet/project-euler | 39921379385ae2521354c7266a541c46785e85a2 | ["MIT"] | null | null | null | project-euler/215/t/to_from_id.t | zoffixznet/project-euler | 39921379385ae2521354c7266a541c46785e85a2 | ["MIT"] | null | null | null | project-euler/215/t/to_from_id.t | zoffixznet/project-euler | 39921379385ae2521354c7266a541c46785e85a2 | ["MIT"] | null | null | null |
#!/usr/bin/perl
use strict;
use warnings;
use Test::More tests => 1;
use lib '.';
use Euler215 qw(from_id to_id);
use Test::Differences (qw( eq_or_diff ));
{
my $wall =
[ { l => 8, o => 3, }, { l => 7, o => 3, }, { l => 6, o => 2, }, ];
# TEST
eq_or_diff(
from_id( to_id($wall) ),
$wall, "Back and forth from_id/to_id",
);
}
=head1 COPYRIGHT & LICENSE
Copyright 2017 by Shlomi Fish
This program is distributed under the MIT / Expat License:
L<http://www.opensource.org/licenses/mit-license.php>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
=cut
| 29.698113 | 75 | 0.729352 |
73db55e1ca6940d46a0cfa67b52b13ab8d1595aa | 32,429 | pl | Perl | perl/vGhettoClone_extended.pl | mtboren/vghetto-scripts_fork | dbf0fdbfc39430bba000820019645bf5c538c996 | ["MIT", "Unlicense"] | 1 | 2021-01-11T20:34:46.000Z | 2021-01-11T20:34:46.000Z | perl/vGhettoClone_extended.pl | v1ckxy/vghetto-scripts | 8d2b7bcb62992dcabea60e525962bfa70305e8d5 | ["MIT", "Unlicense"] | null | null | null | perl/vGhettoClone_extended.pl | v1ckxy/vghetto-scripts | 8d2b7bcb62992dcabea60e525962bfa70305e8d5 | ["MIT", "Unlicense"] | null | null | null |
#!/usr/bin/perl -w
#
# Copyright (c) 2007 VMware, Inc. All rights reserved.
#
# Modified 'vmclone.pl' from VMware's VI Perl Toolkit Utilites by William Lam
# http://engineering.ucsb.edu/~duonglt/vmware/vGhettoLinkedClone.html
#
# Modified by Chip Schweiss, [email protected]
# Merged changes from vmclone2.pl (https://communities.vmware.com/docs/DOC-12746) by Bill Call
# Allows for customization of either Windows or Linux VMs using the same schema.
# Added support for cloning via linked clones or copying.
# Added the ability to move the clone to specified folder
# Added a switch to control power on after cloning
# Added resolv.conf configuration when cloning Linux guests
# TODO: Add resource reservations, annotations, CPU cores
use strict;
use warnings;
use Switch;
use FindBin;
use lib "$FindBin::Bin/../";
use lib "/etc/puppetmaster/global/bin/autodeploy/tools/vghetto-scripts/perl";
use VMware::VIRuntime;
use XML::LibXML;
use AppUtil::VMUtil;
use AppUtil::HostUtil;
use AppUtil::XMLInputUtil;
$Util::script_version = "1.0";
sub check_missing_value;
my %opts = (
vmhost => {
type => "=s",
help => "The name of the host",
required => 1,
},
vmname => {
type => "=s",
help => "The name of the Virtual Machine",
required => 1,
},
vmname_destination => {
type => "=s",
help => "The name of the target virtual machine",
required => 1,
},
filename => {
type => "=s",
help => "The name of the configuration specification file",
required => 0,
default => "../sampledata/vmclone.xml",
},
datastore => {
type => "=s",
help => "Name of the Datastore",
required => 0,
},
snapname => {
type => "=s",
help => "Name of Snapshot from pristine base image",
required => 1,
},
folder => {
type => "=s",
help => "Folder to place the clone in",
required => 0,
},
clone_type => {
type => "=s",
help => "Specify the clone type to perform [linked|copy]",
required => 0,
default => 'linked',
},
convert => {
type => "=s",
help => "Convert destination disk type [source|sesparse]",
required => 0,
default => 'source',
},
grainsize => {
type => "=s",
help => "Grainsize for SE Sparse disk [default 1024k]",
required => 0,
default => 1024,
},
power_vm => {
type => "=s",
      help => "Flag to specify whether or not to power on virtual machine after cloning: "
              . "yes,no",
required => 0,
default => 'no',
},
customize_guest => {
type => "=s",
help => "Flag to specify whether or not to customize guest: yes,no",
required => 0,
default => 'no',
},
customize_vm => {
type => "=s",
help => "Flag to specify whether or not to customize virtual machine: "
. "yes,no",
required => 0,
default => 'no',
},
schema => {
type => "=s",
help => "The name of the schema file",
required => 0,
default => "../schema/vmclone.xsd",
},
);
Opts::add_options(%opts);
Opts::parse();
Opts::validate(\&validate);
Util::connect();
clone_vm();
Util::disconnect();
# Clone vm operation
# Gets destination host, compute resource views, and
# datastore info for creating the configuration
# specification to help create a clone of an existing
# virtual machine.
# ====================================================
sub clone_vm {
my $vm_name = Opts::get_option('vmname');
my $clone_name = Opts::get_option('vmname_destination');
my $clone_type = Opts::get_option('clone_type');
my $vm_snapshot_name = Opts::get_option('snapname');
my $convert = Opts::get_option('convert');
my $grainsize = Opts::get_option('grainsize');
my $vm_views = Vim::find_entity_views(view_type => 'VirtualMachine',
filter => {'name' =>$vm_name});
my $parser = XML::LibXML->new();
my $tree = $parser->parse_file(Opts::get_option('filename'));
my $root = $tree->getDocumentElement;
my @cspec = $root->findnodes('Virtual-Machine-Spec');
my $clone_view;
my $config_spec_operation;
my @NIC;
my $nic_network;
my $nic_adapter;
if(@$vm_views) {
foreach (@$vm_views) {
my $host_name = Opts::get_option('vmhost');
my $host_view = Vim::find_entity_view(view_type => 'HostSystem',
filter => {'name' => $host_name});
if (!$host_view) {
Util::trace(0, "Host '$host_name' not found\n");
return;
}
if ($host_view) {
my $comp_res_view = Vim::get_view(mo_ref => $host_view->parent);
my $ds_name = Opts::get_option('datastore');
my %ds_info = HostUtils::get_datastore(host_view => $host_view,
datastore => $ds_name,
disksize => get_disksize());
if ($ds_info{mor} eq 0) {
if ($ds_info{name} eq 'datastore_error') {
Util::trace(0, "\nDatastore $ds_name not available.\n");
return;
}
if ($ds_info{name} eq 'disksize_error') {
Util::trace(0, "\nThe free space available is less than the"
. " specified disksize or the host"
. " is not accessible.\n");
return;
}
}
my ($vm_snapshot,$ref,$nRefs);
if(defined $_->snapshot) {
($ref, $nRefs) = find_snapshot_name ($_->snapshot->rootSnapshotList,
$vm_snapshot_name);
}
if (defined $ref && $nRefs == 1) {
$vm_snapshot = Vim::get_view (mo_ref =>$ref->snapshot);
}
else {
Util::trace(0, "\nSnapshot $vm_snapshot_name not found. \n");
return;
}
my ($diskLocator,$relocate_spec,$diskType,$diskId);
my $vm_device = $_->config->hardware->device;
foreach my $vdevice (@$vm_device) {
if($vdevice->isa('VirtualDisk')) {
$diskId = $vdevice->key;
last;
}
}
if ($clone_type eq "copy") {
$convert = "source";
$relocate_spec = VirtualMachineRelocateSpec->new(datastore => $ds_info{mor},
host => $host_view,
pool => $comp_res_view->resourcePool);
}
elsif ($convert eq "sesparse" && Vim::get_service_content()->about->version eq "5.1.0") {
my $newdiskName = "[" . $ds_name . "] " . $clone_name . "/" . $clone_name . ".vmdk";
$diskLocator = VirtualMachineRelocateSpecDiskLocator->new(datastore => $ds_info{mor},
diskBackingInfo => VirtualDiskFlatVer2BackingInfo->new(fileName => $newdiskName,
diskMode => 'persistent',
deltaDiskFormat => 'seSparseFormat',
deltaGrainSize => $grainsize),
diskId => $diskId);
$relocate_spec = VirtualMachineRelocateSpec->new(datastore => $ds_info{mor},
host => $host_view,
diskMoveType => "createNewChildDiskBacking",
pool => $comp_res_view->resourcePool,
disk => [$diskLocator]);
} else {
$relocate_spec = VirtualMachineRelocateSpec->new(datastore => $ds_info{mor},
host => $host_view,
diskMoveType => "createNewChildDiskBacking",
pool => $comp_res_view->resourcePool);
}
my $clone_spec ;
my $config_spec;
my $customization_spec;
my $poweron;
            # We will trigger the power on after cloning if specified
$poweron = 0;
if ((Opts::get_option('customize_vm') eq "yes")
&& (Opts::get_option('customize_guest') ne "yes")) {
$config_spec = get_config_spec();
$clone_spec = VirtualMachineCloneSpec->new(powerOn => 0,
template => 0,
snapshot => $vm_snapshot,
location => $relocate_spec,
config => $config_spec,
);
}
elsif ((Opts::get_option('customize_guest') eq "yes")
&& (Opts::get_option('customize_vm') ne "yes")) {
$customization_spec = VMUtils::get_customization_spec
(Opts::get_option('filename'));
$clone_spec = VirtualMachineCloneSpec->new(powerOn => $poweron,
template => 0,
snapshot => $vm_snapshot,
location => $relocate_spec,
customization => $customization_spec,
);
}
elsif ((Opts::get_option('customize_guest') eq "yes")
&& (Opts::get_option('customize_vm') eq "yes")) {
$customization_spec = VMUtils::get_customization_spec
(Opts::get_option('filename'));
$config_spec = get_config_spec();
$clone_spec = VirtualMachineCloneSpec->new(
powerOn => $poweron,
template => 0,
snapshot => $vm_snapshot,
location => $relocate_spec,
customization => $customization_spec,
config => $config_spec,
);
}
else {
$clone_spec = VirtualMachineCloneSpec->new(
powerOn => $poweron,
template => 0,
snapshot => $vm_snapshot,
location => $relocate_spec,
);
}
$Data::Dumper::Sortkeys = 1; #Sort the keys in the output
$Data::Dumper::Deepcopy = 1; #Enable deep copies of structures
$Data::Dumper::Indent = 1; #Enable enough indentation to read the output
print Dumper ($customization_spec) . "\n";
##
# Do the actual clone
##
            Util::trace (0, "\nCloning virtual machine '" . $clone_name . "' from '" . $vm_name . "' ...\n");
eval {
$_->CloneVM(folder => $_->parent,
name => Opts::get_option('vmname_destination'),
spec => $clone_spec);
Util::trace (0, "\nClone '$clone_name' of virtual machine"
. " '$vm_name' successfully created.\n");
};
if ($@) {
if (ref($@) eq 'SoapFault') {
if (ref($@->detail) eq 'FileFault') {
Util::trace(0, "\nFailed to access the virtual "
." machine files\n");
}
elsif (ref($@->detail) eq 'InvalidState') {
Util::trace(0,"The operation is not allowed "
."in the current state.\n");
}
elsif (ref($@->detail) eq 'NotSupported') {
Util::trace(0," Operation is not supported by the "
."current agent \n");
}
elsif (ref($@->detail) eq 'VmConfigFault') {
Util::trace(0,
"Virtual machine is not compatible with the destination host.\n");
}
elsif (ref($@->detail) eq 'InvalidPowerState') {
Util::trace(0,
"The attempted operation cannot be performed "
."in the current state.\n");
}
elsif (ref($@->detail) eq 'DuplicateName') {
Util::trace(0,
"The name '$vm_name' already exists\n");
}
elsif (ref($@->detail) eq 'NoDisksToCustomize') {
Util::trace(0, "\nThe virtual machine has no virtual disks that"
. " are suitable for customization or no guest"
. " is present on given virtual machine" . "\n");
}
elsif (ref($@->detail) eq 'HostNotConnected') {
Util::trace(0, "\nUnable to communicate with the remote host, "
."since it is disconnected" . "\n");
}
elsif (ref($@->detail) eq 'UncustomizableGuest') {
Util::trace(0, "\nCustomization is not supported "
."for the guest operating system" . "\n");
}
else {
Util::trace (0, "Fault" . $@ . "" );
}
}
else {
Util::trace (0, "Fault" . $@ . "" );
}
}
else {
               # Clone was successful. Perform post-clone tasks.
# Move to a folder if specified
my $vm_folder = Opts::get_option('folder');
if ($vm_folder ne "") {
move_vm_to_folder($clone_name, $vm_folder);
}
               # Set up NICs and their backing
foreach (@cspec) {
if ($_->findvalue('NIC')) {
# Network adapters are being defined. Destroy existing ones first.
$clone_view = Vim::find_entity_view(view_type => 'VirtualMachine',
filter =>{ 'name' => $clone_name});
if ($clone_view) {
my $devices = $clone_view->config->hardware->device;
foreach my $vnic_device (@$devices){
if (index($vnic_device->deviceInfo->label, "Network adapter " ) != -1 ) {
#remove the old vNIC
Util::trace(0, "\nRemoving NIC " . ref($vnic_device) );
$config_spec_operation = VirtualDeviceConfigSpecOperation->new('remove');
my $vm_dev_spec = VirtualDeviceConfigSpec->new(device => $vnic_device,
operation => $config_spec_operation);
my $vmChangespec = VirtualMachineConfigSpec->new(deviceChange => [ $vm_dev_spec ] );
eval{
$clone_view->ReconfigVM_Task(spec => $vmChangespec);
};
if ($@) {
Util::trace(0, "\nFailed to remove NIC.");
} else {
Util::trace(0, "\nSuccess.");
}
}
}
} else {
Util::trace(0, "\nCould not find newly created clone");
exit 1
}
@NIC = $_->findnodes('NIC');
# Add back network adapters as defined.
$config_spec_operation = VirtualDeviceConfigSpecOperation->new('add');
foreach (@NIC) {
$nic_network = $_->findvalue('Network');
if ( $_->findvalue('Adapter')) {
$nic_adapter = $_->findvalue('Adapter');
} else {
$nic_adapter = "vmxnet3";
}
my $backing_info = VirtualEthernetCardNetworkBackingInfo->new(deviceName => $nic_network);
my $newNetworkDevice;
switch($nic_adapter) {
case 'e1000' {
$newNetworkDevice = VirtualE1000->new(key => -1,
backing => $backing_info,
addressType => 'Assigned');
}
case 'pcnet' {
$newNetworkDevice = VirtualPCNet32->new(key => -1,
backing => $backing_info,
addressType => 'Assigned');
}
case 'vmxnet2' {
$newNetworkDevice = VirtualVmxnet2->new(key => -1,
backing => $backing_info,
addressType => 'Assigned');
}
case 'vmxnet3' {
$newNetworkDevice = VirtualVmxnet3->new(key => -1,
backing => $backing_info,
addressType => 'Assigned');
}
else {
                                Util::trace(0, "\nInvalid adapter type $nic_adapter.");
}
}
my $vm_dev_spec = VirtualDeviceConfigSpec->new(device => $newNetworkDevice,
operation => $config_spec_operation);
my $vmChangespec = VirtualMachineConfigSpec->new(deviceChange => [ $vm_dev_spec ] );
eval {
$clone_view->ReconfigVM_Task(spec => $vmChangespec);
};
if ($@) {
Util::trace(0, "\nFailed to add $nic_adapter on $nic_network to $clone_name.");
} else {
Util::trace(0, "\nSuccessfully added $nic_adapter on $nic_network to $clone_name.");
}
}
}
}
# Power on the VM if specified.
if (Opts::get_option('power_vm') eq "yes") {
$clone_view = Vim::find_entity_view(
view_type => "VirtualMachine",
filter => { 'name' => $clone_name },
);
eval {
$clone_view->PowerOnVM_Task();
};
if ($@) {
Util::trace(0, "\nFailed to power on $clone_name" );
} else {
Util::trace(0, "\nSuccessfully powered on $clone_name" );
}
}
}
}
}
}
else {
Util::trace (0, "\nNo virtual machine found with name '$vm_name'\n");
}
}
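# Move the named VM into an inventory folder given as a slash-separated
# path under the root 'vm' folder; e.g. move_vm_to_folder('DVM99', 'Dev/Linux')
# walks vm -> Dev -> Linux before moving the VM there.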
sub move_vm_to_folder {
my ($vmname, $folder) = @_;
my @subdirs;
#Get the VM object
my $current_views = Vim::find_entity_views((view_type => 'VirtualMachine'), filter => { name => $vmname } );
my $vm_moref = shift @$current_views;
# Separate the folder into its parts
@subdirs = split(/\//,$folder);
# Grab the root folder 'vm'
my $folder_views = Vim::find_entity_views(view_type => 'Folder', filter => { name => 'vm' });
my $vm_folder_ref = shift (@$folder_views);
my $current_ref = $vm_folder_ref;
foreach my $dir(@subdirs) {
#Find the child with the name of $dir and change the current_ref to it
my $children = $current_ref->{childEntity};
foreach my $child(@$children) {
my $child_ref = Vim::get_view(mo_ref => $child);
if ($child_ref->{mo_ref}->{type} eq "Folder") {
if ($child_ref->{name} eq $dir) {
$current_ref = $child_ref;
last;
}
}
}
}
$current_ref->MoveIntoFolder_Task(list => $vm_moref);
}
sub find_snapshot_name {
my ($tree, $name) = @_;
my $ref = undef;
my $count = 0;
foreach my $node (@$tree) {
if ($node->name eq $name) {
$ref = $node;
$count++;
}
my ($subRef, $subCount) = find_snapshot_name($node->childSnapshotList, $name);
$count = $count + $subCount;
$ref = $subRef if ($subCount);
}
return ($ref, $count);
}
#Gets the config_spec for customizing the memory, number of cpu's
# and returns the spec
sub get_config_spec() {
my $parser = XML::LibXML->new();
my $tree = $parser->parse_file(Opts::get_option('filename'));
my $root = $tree->getDocumentElement;
my @cspec = $root->findnodes('Virtual-Machine-Spec');
my $vmname ;
my $vmhost ;
my $guestid;
my $datastore;
my $disksize = 4096; # in KB;
my $memory = 256; # in MB;
my $num_cpus = 1;
my $nic_network;
my $nic_poweron = 1;
foreach (@cspec) {
if ($_->findvalue('Guest-Id')) {
$guestid = $_->findvalue('Guest-Id');
}
if ($_->findvalue('Memory')) {
$memory = $_->findvalue('Memory');
}
if ($_->findvalue('Number-of-CPUS')) {
$num_cpus = $_->findvalue('Number-of-CPUS');
}
$vmname = Opts::get_option('vmname_destination');
}
my $vm_config_spec = VirtualMachineConfigSpec->new(
name => $vmname,
memoryMB => $memory,
numCPUs => $num_cpus,
guestId => $guestid );
return $vm_config_spec;
}
sub get_disksize {
my $disksize = -1;
my $parser = XML::LibXML->new();
eval {
my $tree = $parser->parse_file(Opts::get_option('filename'));
my $root = $tree->getDocumentElement;
my @cspec = $root->findnodes('Virtual-Machine-Spec');
foreach (@cspec) {
$disksize = $_->findvalue('Disksize');
}
};
return $disksize;
}
sub find_disk {
my %args = @_;
my $vm = $args{vm};
my $name = $args{fileName};
my $devices = $vm->config->hardware->device;
foreach my $device(@$devices) {
if($device->isa('VirtualDisk')) {
return $device;
}
}
}
# check missing values of mandatory fields
sub check_missing_value {
my $valid= 1;
my $filename = Opts::get_option('filename');
my $parser = XML::LibXML->new();
my $tree = $parser->parse_file($filename);
my $root = $tree->getDocumentElement;
my @cust_spec = $root->findnodes('Customization-Spec');
my $total = @cust_spec;
if (!$cust_spec[0]->findvalue('Auto-Logon')) {
Util::trace(0,"\nERROR in '$filename':\n autologon value missing ");
$valid = 0;
}
if (!$cust_spec[0]->findvalue('Virtual-Machine-Name')) {
Util::trace(0,"\nERROR in '$filename':\n computername value missing ");
$valid = 0;
}
if (!$cust_spec[0]->findvalue('Timezone')) {
Util::trace(0,"\nERROR in '$filename':\n timezone value missing ");
$valid = 0;
}
if (!$cust_spec[0]->findvalue('Domain')) {
Util::trace(0,"\nERROR in '$filename':\n domain value missing ");
$valid = 0;
}
if (!$cust_spec[0]->findvalue('Domain-User-Name')) {
Util::trace(0,"\nERROR in '$filename':\n domain_user_name value missing ");
$valid = 0;
}
if (!$cust_spec[0]->findvalue('Domain-User-Password')) {
Util::trace(0,"\nERROR in '$filename':\n domain_user_password value missing ");
$valid = 0;
}
if (!$cust_spec[0]->findvalue('Full-Name')) {
Util::trace(0,"\nERROR in '$filename':\n fullname value missing ");
$valid = 0;
}
if (!$cust_spec[0]->findvalue('Orgnization-Name')) {
Util::trace(0,"\nERROR in '$filename':\n Orgnization name value missing ");
$valid = 0;
}
return $valid;
}
sub validate {
my $valid= 1;
if ((Opts::get_option('customize_vm') eq "yes")
|| (Opts::get_option('customize_guest') eq "yes")) {
$valid = XMLValidation::validate_format(Opts::get_option('filename'));
if ($valid == 1) {
$valid = XMLValidation::validate_schema(Opts::get_option('filename'),
Opts::get_option('schema'));
if ($valid == 1) {
$valid = check_missing_value();
}
}
}
if (Opts::option_is_set('customize_vm')) {
if ((Opts::get_option('customize_vm') ne "yes")
&& (Opts::get_option('customize_vm') ne "no")) {
Util::trace(0,"\nMust specify 'yes' or 'no' for customize_vm option");
$valid = 0;
}
}
if (Opts::option_is_set('customize_guest')) {
if ((Opts::get_option('customize_guest') ne "yes")
&& (Opts::get_option('customize_guest') ne "no")) {
Util::trace(0,"\nMust specify 'yes' or 'no' for customize_guest option");
$valid = 0;
}
}
if (Opts::option_is_set('clone_type')) {
if ((Opts::get_option('clone_type') ne "linked")
&& (Opts::get_option('clone_type') ne "copy")) {
Util::trace(0,"\nMust specify 'linked' or 'copy' for clone_type option");
$valid = 0;
}
}
if (Opts::option_is_set('convert')) {
if ((Opts::get_option('convert') ne "source")
&& (Opts::get_option('convert') ne "sesparse")) {
Util::trace(0,"\nMust specify 'source' or 'sesparse' for convert option");
$valid = 0;
}
}
if (Opts::option_is_set('power_vm')) {
if ((Opts::get_option('power_vm') ne "yes")
&& (Opts::get_option('power_vm') ne "no")) {
Util::trace(0,"\nMust specify 'yes' or 'no' for power_vm option");
$valid = 0;
}
}
return $valid;
}
__END__
=head1 NAME
vmclone.pl - Perform a clone operation on a virtual machine and a
customize operation on both the virtual machine and the guest.
=head1 SYNOPSIS
vmclone.pl [options]
=head1 DESCRIPTION
VI Perl command-line utility allows you to clone a virtual machine. You
can customize the virtual machine or the guest operating system as part
of the clone operation.
=head1 OPTIONS
=head2 GENERAL OPTIONS
=over
=item B<vmhost>
Required. Name of the host containing the virtual machine.
=item B<vmname>
Required. Name of the virtual machine whose clone is to be created.
=item B<vmname_destination>
Required. Name of the clone virtual machine which will be created.
=item B<datastore>
Optional. Name of a datastore. If none is given, the script uses the default datastore.
=back
=head2 CUSTOMIZE GUEST OPTIONS
=over
=item B<customize_guest>
Required. Customize guest is used to customize the network settings of the guest
operating system. Options are Yes/No.
=item B<filename>
Required. The name of the file in which the values of the parameters to be
customized are written, e.g. --filename clone_vm.xml.
=item B<schema>
Required. The name of the schema file used to validate the input file.
=back
=head2 CUSTOMIZE VM OPTIONS
=over
=item B<customize_vm>
Required. customize_vm is used to customize virtual machine settings
such as disk size and memory. If set to yes, the virtual machine is customized.
=item B<filename>
Required. The name of the file in which the values of the parameters to be
customized are written, e.g. --filename clone_vm.xml.
=item B<schema>
Required. The name of the schema file used to validate the input file.
=back
=head2 INPUT PARAMETERS
=head3 GUEST CUSTOMIZATION
The parameters for customizing the guest os are specified in an XML
file. The structure of the input XML file is:
<Specification>
<Customization-Spec>
</Customization-Spec>
</Specification>
Following are the input parameters:
=over
=item B<Auto-Logon>
Required. Flag to specify whether auto logon should be enabled or disabled.
=item B<Virtual-Machine-Name>
Required. Name of the virtual machine to be created.
=item B<Timezone>
Required. Time zone property of guest OS.
=item B<Domain>
Required. The domain that the virtual machine should join.
=item B<Domain-User-Name>
Required. The domain user account used for authentication.
=item B<Domain-User-Password>
Required. The password for the domain user account used for authentication.
=item B<Full-Name>
Required. User's full name.
=item B<Orgnization-Name>
Required. User's organization.
=back
=head3 VIRTUAL MACHINE CUSTOMIZATION
The parameters for customizing the virtual machine are specified in an XML
file. The structure of the input XML file is:
<Specification>
<Config-Spec>
<!--Several parameters like Guest-Id, Memory, Disksize, Number-of-CPUS etc-->
</Config-Spec>
</Specification>
Following are the input parameters:
=over
=item B<Guest-Id>
Required. Short guest operating system identifier.
=item B<Memory>
Required. Size of a virtual machine's memory, in MB.
=item B<Number-of-CPUS>
Required. Number of virtual processors in a virtual machine.
=back
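A minimal specification sketch combining these elements with the optional
C<Disksize> and C<NIC> elements that this script also reads (all values
are illustrative):

 <Specification>
   <Virtual-Machine-Spec>
     <Guest-Id>rhel6_64Guest</Guest-Id>
     <Memory>2048</Memory>
     <Number-of-CPUS>2</Number-of-CPUS>
     <Disksize>8388608</Disksize>
     <NIC>
       <Network>VM Network</Network>
       <Adapter>vmxnet3</Adapter>
     </NIC>
   </Virtual-Machine-Spec>
 </Specification>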
See the B<vmcreate.pl> page for an example of a virtual machine XML file.
=head1 EXAMPLES
Making a clone without any customization:
perl vmclone.pl --username username --password mypassword
--vmhost <hostname/ipaddress> --vmname DVM1 --vmname_destination DVM99
--url https://<ipaddress>:<port>/sdk/webService
If datastore is given:
perl vmclone.pl --username username --password mypassword
--vmhost <hostname/ipaddress> --vmname DVM1 --vmname_destination DVM99
--url https://<ipaddress>:<port>/sdk/webService --datastore storage1
Making a clone and customizing the VM:
perl vmclone.pl --username myusername --password mypassword
--vmhost <hostname/ipaddress> --vmname DVM1 --vmname_destination Clone_VM
--url https://<ipaddress>:<port>/sdk/webService --customize_vm yes
--filename clone_vm.xml --schema clone_schema.xsd
Making a clone and customizing the guestOS:
perl vmclone.pl --username myuser --password mypassword --operation clone
--vmhost <hostname/ipaddress> --vmname DVM1 --vmname_destination DVM99
--url https://<ipaddress>:<port>/sdk/webService --customize_guest yes
--filename clone_vm.xml --schema clone_schema.xsd
Making a clone and customizing both guestos and VM:
perl vmclone.pl --username myuser --password mypassword
--vmhost <hostname/ipaddress> --vmname DVM1 --vmname_destination DVM99
--url https://<ipaddress>:<port>/sdk/webService --customize_guest yes
--customize_vm yes --filename clone_vm.xml --schema clone_schema.xsd
All the parameters which are to be customized are written in the vmclone.xml file.
=head1 SUPPORTED PLATFORMS
All operations supported on VirtualCenter 2.0.1 or later.
To perform the clone operation, you must connect to a VirtualCenter server.
| 35.558114 | 115 | 0.500262 |
ed2f83ada935b944d133bfcb09f7c244428a15a2 | 4,432 | pm | Perl | auto-lib/Paws/Glue/Table.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/Glue/Table.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/Glue/Table.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
# Generated by default/object.tt
package Paws::Glue::Table;
use Moose;
has CatalogId => (is => 'ro', isa => 'Str');
has CreatedBy => (is => 'ro', isa => 'Str');
has CreateTime => (is => 'ro', isa => 'Str');
has DatabaseName => (is => 'ro', isa => 'Str');
has Description => (is => 'ro', isa => 'Str');
has IsRegisteredWithLakeFormation => (is => 'ro', isa => 'Bool');
has LastAccessTime => (is => 'ro', isa => 'Str');
has LastAnalyzedTime => (is => 'ro', isa => 'Str');
has Name => (is => 'ro', isa => 'Str', required => 1);
has Owner => (is => 'ro', isa => 'Str');
has Parameters => (is => 'ro', isa => 'Paws::Glue::ParametersMap');
has PartitionKeys => (is => 'ro', isa => 'ArrayRef[Paws::Glue::Column]');
has Retention => (is => 'ro', isa => 'Int');
has StorageDescriptor => (is => 'ro', isa => 'Paws::Glue::StorageDescriptor');
has TableType => (is => 'ro', isa => 'Str');
has TargetTable => (is => 'ro', isa => 'Paws::Glue::TableIdentifier');
has UpdateTime => (is => 'ro', isa => 'Str');
has ViewExpandedText => (is => 'ro', isa => 'Str');
has ViewOriginalText => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Glue::Table
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::Glue::Table object:
$service_obj->Method(Att1 => { CatalogId => $value, ..., ViewOriginalText => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::Glue::Table object:
$result = $service_obj->Method(...);
$result->Att1->CatalogId
=head1 DESCRIPTION
Represents a collection of related data organized in columns and rows.
=head1 ATTRIBUTES
=head2 CatalogId => Str
The ID of the Data Catalog in which the table resides.
=head2 CreatedBy => Str
The person or entity who created the table.
=head2 CreateTime => Str
The time when the table definition was created in the Data Catalog.
=head2 DatabaseName => Str
The name of the database where the table metadata resides. For Hive
compatibility, this must be all lowercase.
=head2 Description => Str
A description of the table.
=head2 IsRegisteredWithLakeFormation => Bool
Indicates whether the table has been registered with Lake Formation.
=head2 LastAccessTime => Str
The last time that the table was accessed. This is usually taken from
HDFS, and might not be reliable.
=head2 LastAnalyzedTime => Str
The last time that column statistics were computed for this table.
=head2 B<REQUIRED> Name => Str
The table name. For Hive compatibility, this must be entirely
lowercase.
=head2 Owner => Str
The owner of the table.
=head2 Parameters => L<Paws::Glue::ParametersMap>
These key-value pairs define properties associated with the table.
=head2 PartitionKeys => ArrayRef[L<Paws::Glue::Column>]
A list of columns by which the table is partitioned. Only primitive
types are supported as partition keys.
When you create a table used by Amazon Athena, and you do not specify
any C<partitionKeys>, you must at least set the value of
C<partitionKeys> to an empty list. For example:
C<"PartitionKeys": []>
=head2 Retention => Int
The retention time for this table.
=head2 StorageDescriptor => L<Paws::Glue::StorageDescriptor>
A storage descriptor containing information about the physical storage
of this table.
=head2 TableType => Str
The type of this table (C<EXTERNAL_TABLE>, C<VIRTUAL_VIEW>, etc.).
=head2 TargetTable => L<Paws::Glue::TableIdentifier>
A C<TableIdentifier> structure that describes a target table for
resource linking.
=head2 UpdateTime => Str
The last time that the table was updated.
=head2 ViewExpandedText => Str
If the table is a view, the expanded text of the view; otherwise
C<null>.
=head2 ViewOriginalText => Str
If the table is a view, the original text of the view; otherwise
C<null>.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::Glue>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 24.486188 | 102 | 0.698105 |
73f7c534f60bb567d5a339921bce020caec20ec6 | 449 | pl | Perl | perl/lib/unicore/lib/Ccc/ATAR.pl | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | ["Apache-2.0"] | 4 | 2018-04-20T07:27:13.000Z | 2021-12-21T05:19:24.000Z | perl/lib/unicore/lib/Ccc/ATAR.pl | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | ["Apache-2.0"] | 4 | 2021-03-10T19:10:00.000Z | 2021-05-11T14:58:19.000Z | perl/lib/unicore/lib/Ccc/ATAR.pl | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | ["Apache-2.0"] | 1 | 2019-11-12T02:29:26.000Z | 2019-11-12T02:29:26.000Z |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
031B
0F39
1D165 1D166
1D16E 1D172
END
| 26.411765 | 77 | 0.672606 |
73e48cb9d42d80357c3809e227723c443b02e740 | 564 | pm | Perl | auto-lib/Paws/S3/GetObjectLockConfigurationOutput.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | ["Apache-2.0"] | null | null | null | auto-lib/Paws/S3/GetObjectLockConfigurationOutput.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | ["Apache-2.0"] | null | null | null | auto-lib/Paws/S3/GetObjectLockConfigurationOutput.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | ["Apache-2.0"] | null | null | null |
package Paws::S3::GetObjectLockConfigurationOutput;
use Moose;
has ObjectLockConfiguration => (is => 'ro', isa => 'Paws::S3::ObjectLockConfiguration');
use MooseX::ClassAttribute;
class_has _payload => (is => 'ro', default => 'ObjectLockConfiguration');
has _request_id => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::S3::GetObjectLockConfigurationOutput
=head1 ATTRIBUTES
=head2 ObjectLockConfiguration => L<Paws::S3::ObjectLockConfiguration>
The specified bucket's Object Lock configuration.
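A typical read pattern (a sketch; C<$s3> is assumed to be a Paws S3
service object and the bucket name is illustrative):

    my $output = $s3->GetObjectLockConfiguration(Bucket => 'my-bucket');
    if (my $conf = $output->ObjectLockConfiguration) {
        print $conf->ObjectLockEnabled, "\n";
    }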
=cut
| 19.448276 | 90 | 0.716312 |
ed549517274eebf2fdc7a60d88854ba94a34ae5a | 510 | pl | Perl | Prolog/Tree/22.CountOccurrenceTree.pl | angelmpalomares/ModelAndLanguagesForBioInformatics | 0b981bfcdc2a58ad72da3513e783ef75e53c205c | ["MIT"] | null | null | null | Prolog/Tree/22.CountOccurrenceTree.pl | angelmpalomares/ModelAndLanguagesForBioInformatics | 0b981bfcdc2a58ad72da3513e783ef75e53c205c | ["MIT"] | 1 | 2021-06-08T07:44:38.000Z | 2021-06-08T07:53:10.000Z | Prolog/Tree/22.CountOccurrenceTree.pl | angelmpalomares/ModelAndLanguagesForBioInformatics | 0b981bfcdc2a58ad72da3513e783ef75e53c205c | ["MIT"] | 2 | 2021-04-11T10:13:57.000Z | 2021-06-07T23:20:31.000Z |
% countOccurrenceN(Value,Tree,Result): Return the number of occurrences of an element in a Tree
countOccurrenceN(_,nil,0):-!.
countOccurrenceN(Root,t(Root,nil,nil),1):-!.
countOccurrenceN(Elem,t(Root,Left,Right),Res):-
Elem \== Root, !, countOccurrenceN(Elem,Left,RLeft),
countOccurrenceN(Elem,Right,RRight),
Res is RLeft+RRight.
countOccurrenceN(Elem,t(Root,Left,Right),Res):-
countOccurrenceN(Elem,Left,RLeft),
countOccurrenceN(Elem,Right,RRight),
Res is RLeft+RRight+1.
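% Example query (counts both occurrences of 3 in the tree):
% ?- countOccurrenceN(3, t(3, t(1,nil,nil), t(3,nil,nil)), Res).
% Res = 2.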
| 39.230769 | 95 | 0.713725 |
ed40294a3fbe1589a0743ce08890b2d29a6b39bc | 952 | pl | Perl | www/CiaoDE/ciao/library/argnamesv/examples/simple_db.pl | leuschel/ecce | f7f834bd219759cd7e8b3709801ffe26082c766d | ["Apache-2.0"] | 10 | 2015-10-16T08:23:29.000Z | 2020-08-10T18:17:26.000Z | www/CiaoDE/ciao/library.development/argnamesvv/examples/simple_db.pl | leuschel/ecce | f7f834bd219759cd7e8b3709801ffe26082c766d | ["Apache-2.0"] | null | null | null | www/CiaoDE/ciao/library.development/argnamesvv/examples/simple_db.pl | leuschel/ecce | f7f834bd219759cd7e8b3709801ffe26082c766d | ["Apache-2.0"] | 3 | 2015-10-18T11:11:44.000Z | 2019-02-13T14:18:49.000Z |
:- module(simple_db,_,[argnames,assertions,regtypes]).
:- use_module(library(aggregates)).
:- comment(title,"A simple database application using argument names").
:- pred product/4 :: int * string * string * int.
:- argnames
product( id, description, brand, quantity ).
% ----------------------------------------------------------
product( 1, "Keyboard", "Logitech", 6 ).
product( 2, "Mouse", "Logitech", 5 ).
product( 3, "Monitor", "Philips", 3 ).
product( 4, "Laptop", "Dell", 4 ).
% Compute the stock of products from a given brand.
% Note call to findall is equivalent to: findall(Q,product(_,_,Brand,Q),L).
brand_stock(Brand,Stock) :-
findall(Q,product${brand=>Brand,quantity=>Q},L),
sumlist(L,Stock).
sumlist([],0).
sumlist([X|T],S) :-
sumlist(T,S1),
S is X + S1.
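% Example query against the facts above:
% ?- brand_stock("Logitech", Stock).
% Stock = 11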
| 36.615385 | 75 | 0.507353 |
ed74dff717f038409f57af4da627d25406fcbadd | 3,169 | t | Perl | openresty-win32-build/thirdparty/perl5-5.29.6/dist/Devel-PPPort/t/magic.t | nneesshh/openresty-win32-build | bfbb9d7526020eda1788a0ed24f2be3c8be5c1c3 | ["MIT"] | 2 | 2018-06-15T08:32:44.000Z | 2019-01-12T03:20:41.000Z | openresty-win32-build/thirdparty/perl5-5.29.6/dist/Devel-PPPort/t/magic.t | nneesshh/openresty-win32-build | bfbb9d7526020eda1788a0ed24f2be3c8be5c1c3 | ["MIT"] | null | null | null | openresty-win32-build/thirdparty/perl5-5.29.6/dist/Devel-PPPort/t/magic.t | nneesshh/openresty-win32-build | bfbb9d7526020eda1788a0ed24f2be3c8be5c1c3 | ["MIT"] | null | null | null |
################################################################################
#
# !!!!! Do NOT edit this file directly! !!!!!
#
# Edit mktests.PL and/or parts/inc/magic instead.
#
# This file was automatically generated from the definition files in the
# parts/inc/ subdirectory by mktests.PL. To learn more about how all this
# works, please read the F<HACKERS> file that came with this distribution.
#
################################################################################
BEGIN {
if ($ENV{'PERL_CORE'}) {
chdir 't' if -d 't';
@INC = ('../lib', '../ext/Devel-PPPort/t') if -d '../lib' && -d '../ext';
require Config; import Config;
use vars '%Config';
if (" $Config{'extensions'} " !~ m[ Devel/PPPort ]) {
print "1..0 # Skip -- Perl configured without Devel::PPPort module\n";
exit 0;
}
}
else {
unshift @INC, 't';
}
sub load {
eval "use Test";
require 'testutil.pl' if $@;
}
if (23) {
load();
plan(tests => 23);
}
}
use Devel::PPPort;
use strict;
$^W = 1;
package Devel::PPPort;
use vars '@ISA';
require DynaLoader;
@ISA = qw(DynaLoader);
bootstrap Devel::PPPort;
package main;
# Find proper magic
ok(my $obj1 = Devel::PPPort->new_with_mg());
ok(Devel::PPPort::as_string($obj1), 'hello');
# Find with no magic
my $obj = bless {}, 'Fake::Class';
ok(Devel::PPPort::as_string($obj), "Sorry, your princess is in another castle.");
# Find with other magic (not the magic we are looking for)
ok($obj = Devel::PPPort->new_with_other_mg());
ok(Devel::PPPort::as_string($obj), "Sorry, your princess is in another castle.");
# Okay, attempt to remove magic that isn't there
Devel::PPPort::remove_other_magic($obj1);
ok(Devel::PPPort::as_string($obj1), 'hello');
# Remove magic that IS there
Devel::PPPort::remove_null_magic($obj1);
ok(Devel::PPPort::as_string($obj1), "Sorry, your princess is in another castle.");
# Removing when no magic present
Devel::PPPort::remove_null_magic($obj1);
ok(Devel::PPPort::as_string($obj1), "Sorry, your princess is in another castle.");
use Tie::Hash;
my %h;
tie %h, 'Tie::StdHash';
$h{foo} = 'foo';
$h{bar} = '';
&Devel::PPPort::sv_catpv_mg($h{foo}, 'bar');
ok($h{foo}, 'foobar');
&Devel::PPPort::sv_catpvn_mg($h{bar}, 'baz');
ok($h{bar}, 'baz');
&Devel::PPPort::sv_catsv_mg($h{foo}, '42');
ok($h{foo}, 'foobar42');
&Devel::PPPort::sv_setiv_mg($h{bar}, 42);
ok($h{bar}, 42);
&Devel::PPPort::sv_setnv_mg($h{PI}, 3.14159);
ok(abs($h{PI} - 3.14159) < 0.01);
&Devel::PPPort::sv_setpv_mg($h{mhx}, 'mhx');
ok($h{mhx}, 'mhx');
&Devel::PPPort::sv_setpvn_mg($h{mhx}, 'Marcus');
ok($h{mhx}, 'Marcus');
&Devel::PPPort::sv_setsv_mg($h{sv}, 'SV');
ok($h{sv}, 'SV');
&Devel::PPPort::sv_setuv_mg($h{sv}, 4711);
ok($h{sv}, 4711);
&Devel::PPPort::sv_usepvn_mg($h{sv}, 'Perl');
ok($h{sv}, 'Perl');
# v1 is treated as a bareword in older perls...
my $ver = do { local $SIG{'__WARN__'} = sub {}; eval qq[v1.2.0] };
ok("$]" < 5.009 || $@ eq '');
ok("$]" < 5.009 || Devel::PPPort::SvVSTRING_mg($ver));
ok(!Devel::PPPort::SvVSTRING_mg(4711));
my $foo = 'bar';
ok(Devel::PPPort::sv_magic_portable($foo));
ok($foo eq 'bar');
| 26.190083 | 82 | 0.591354 |
ed24478eb9066849cc727624c68e888a781efc2f | 4,614 | t | Perl | t/Flights.t | rezhajulio/zeroclickinfo-spice | b45d330ebd9d253837ade7a7fb90bbdcf73714ba | ["Apache-2.0"] | 9 | 2018-04-02T10:10:06.000Z | 2021-07-07T04:51:46.000Z | t/Flights.t | rezhajulio/zeroclickinfo-spice | b45d330ebd9d253837ade7a7fb90bbdcf73714ba | ["Apache-2.0"] | null | null | null | t/Flights.t | rezhajulio/zeroclickinfo-spice | b45d330ebd9d253837ade7a7fb90bbdcf73714ba | ["Apache-2.0"] | 1 | 2018-10-02T06:37:27.000Z | 2018-10-02T06:37:27.000Z |
#!/usr/bin/env perl
use strict;
use warnings;
use Test::More;
use DDG::Test::Spice;
# get the current UTC time, minus six hours
my ($second, $minute, $hour, $dayOfMonth,
$month, $year, $dayOfWeek, $dayOfYear, $daylightSavings) = gmtime(time - 21600);
$year += 1900;
$month += 1;
# if these tests are run when the hour changes between when the script samples
# the current time and when the script runs the tests below, the tests will break
#
# also, if we rerun the scraper, these tests may need to be updated to reflect
# the new list of active airports and city codes
#
# can we convert the date portion of the comparison string to regex, using
# something like this:
# qr/\/js\/spice\/flights\/route\/CPA\/LAX\/HKG\/LAX\/HKG\/[0-9]{4}\/([0-9]{1}|[0-9]{2})\/([0-9]{1}|[0-9]{2})\/([0-9]{1}|[0-9]{2})/;
ddg_spice_test(
[qw( DDG::Spice::Flights::Route )],
# --- these queries should trigger the IA
# standard query
'Cathay Pacific Los Angeles to Hong Kong Airport' => test_spice(
"/js/spice/flights/route/CPA/LAX/HKG/LAX/HKG/$year/$month/$dayOfMonth/$hour/los%2Bangeles/hong%2Bkong",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route',
),
# standard query
'Jetblue Boston to Los Angeles' => test_spice(
"/js/spice/flights/route/JBU/BOS/LAX/BOS/LAX/$year/$month/$dayOfMonth/$hour/boston/los%2Bangeles",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route',
),
# standard query by airport code
'Jetblue BOS to LAX' => test_spice(
"/js/spice/flights/route/JBU/BOS/LAX/BOS/LAX/$year/$month/$dayOfMonth/$hour/bos/lax",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route',
),
# query with airline at the end
# 'Newark to Paris United' => test_spice(
# "/js/spice/flights/route/UBD%2CUAL/EWR/BVA%2CCDG%2CORY/EWR/BVA/$year/$month/$dayOfMonth/$hour",
# call_type => 'include',
# caller => 'DDG::Spice::Flights::Route',
# ),
# query by airport code with airline at the end
'BOS to LAX Aer Lingus' => test_spice(
"/js/spice/flights/route/EIN/BOS/LAX/BOS/LAX/$year/$month/$dayOfMonth/$hour/bos/lax",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route',
),
# query that generates multiple potential airline matches
# 'American Boston to Los Angeles' => test_spice(
# "/js/spice/flights/route/AAL%2CALC/BOS/LAX/BOS/LAX/$year/$month/$dayOfMonth/$hour",
# call_type => 'include',
# caller => 'DDG::Spice::Flights::Route',
# ),
# query by mixed city/airport code
'Jetblue Boston to JFK' => test_spice(
"/js/spice/flights/route/JBU/BOS/JFK/BOS/JFK/$year/$month/$dayOfMonth/$hour/boston/jfk",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route',
),
'Jetblue JFK to Boston' => test_spice(
"/js/spice/flights/route/JBU/JFK/BOS/JFK/BOS/$year/$month/$dayOfMonth/$hour/jfk/boston",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route',
),
'Boston to JFK Jetblue' => test_spice(
"/js/spice/flights/route/JBU/BOS/JFK/BOS/JFK/$year/$month/$dayOfMonth/$hour/boston/jfk",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route',
),
'JFK to Boston Jetblue' => test_spice(
"/js/spice/flights/route/JBU/JFK/BOS/JFK/BOS/$year/$month/$dayOfMonth/$hour/jfk/boston",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route',
),
'Delta Dallas-Fort Worth International Airport to LAX' => test_spice(
"/js/spice/flights/route/DAL/DFW/LAX/DFW/LAX/$year/$month/$dayOfMonth/$hour/dallas%2Bfort%2Bworth/lax",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route'
),
'delta dallas fort worth to lax' => test_spice(
"/js/spice/flights/route/DAL/DFW/LAX/DFW/LAX/$year/$month/$dayOfMonth/$hour/dallas%2Bfort%2Bworth/lax",
call_type => 'include',
caller => 'DDG::Spice::Flights::Route'
),
# --- these queries should not trigger the IA
# without "to", we cannot determine the source and destination
'Boston Paris Aer Lingus' => undef,
# the airline must appear at the beginning or the end
'Boston Aer Lingus to Paris' => undef,
# currently, we do not parse queries with "from" in between cities
'Boston from Los Angeles Jetblue' => undef,
'Boston' => undef,
'Jetblue' => undef,
'Boston Paris' => undef,
);
alt_to_test('DDG::Spice::Flights::Route', ['route_helper']);
done_testing;
| 35.767442 | 132 | 0.628739 |
ed46d17936cee207169874320596fa61407b15d9 | 2,410 | pm | Perl | storage/quantum/dxi/ssh/plugin.pm | garnier-quentin/centreon-plugins | 51c16b4419d640709d3352a260e4cd63cd96db14 | ["Apache-2.0"] | null | null | null | storage/quantum/dxi/ssh/plugin.pm | garnier-quentin/centreon-plugins | 51c16b4419d640709d3352a260e4cd63cd96db14 | ["Apache-2.0"] | null | null | null | storage/quantum/dxi/ssh/plugin.pm | garnier-quentin/centreon-plugins | 51c16b4419d640709d3352a260e4cd63cd96db14 | ["Apache-2.0"] | null | null | null |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::quantum::dxi::ssh::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_simple);
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$self->{version} = '0.1';
%{$self->{modes}} = (
'compaction' => 'storage::quantum::dxi::ssh::mode::compaction',
'disk-usage' => 'storage::quantum::dxi::ssh::mode::diskusage',
'hostbus-adapter-status' => 'storage::quantum::dxi::ssh::mode::hostbusadapterstatus',
'health' => 'storage::quantum::dxi::ssh::mode::health',
'memory' => 'storage::quantum::dxi::ssh::mode::memory',
'network' => 'storage::quantum::dxi::ssh::mode::network',
'reclamation' => 'storage::quantum::dxi::ssh::mode::reclamation',
'reduction' => 'storage::quantum::dxi::ssh::mode::reduction',
'storage-array-status' => 'storage::quantum::dxi::ssh::mode::storagearraystatus',
'system-status' => 'storage::quantum::dxi::ssh::mode::systemstatus',
'throughput' => 'storage::quantum::dxi::ssh::mode::throughput',
);
return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Quantum DXi series appliances through SSH commands.
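A typical invocation through the generic launcher (a sketch; the mode and
hostname are illustrative, and SSH options depend on your setup):

    perl centreon_plugins.pl --plugin=storage::quantum::dxi::ssh::plugin \
        --mode=health --hostname=10.0.0.5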
=cut
| 40.847458 | 116 | 0.565975 |
ed0575f0afb6e54e6d923e45cf9b4a09344623c7 | 922 | pm | Perl | lib/WWW/Shopify/Model/Locale.pm | gitpan/WWW-Shopify | 359a1a7e421668fee2843440b00bf218f3d8854e | ["MIT"] | null | null | null | lib/WWW/Shopify/Model/Locale.pm | gitpan/WWW-Shopify | 359a1a7e421668fee2843440b00bf218f3d8854e | ["MIT"] | null | null | null | lib/WWW/Shopify/Model/Locale.pm | gitpan/WWW-Shopify | 359a1a7e421668fee2843440b00bf218f3d8854e | ["MIT"] | null | null | null |
#!/usr/bin/perl
use strict;
use warnings;
use WWW::Shopify;
package WWW::Shopify::Model::Locale;
use parent "WWW::Shopify::Model::Item";
sub countable { return undef; }
sub creatable { return undef; }
sub updatable { return undef; }
sub deletable { return undef; }
my $fields; sub fields { return $fields; }
BEGIN { $fields = {
"id" => new WWW::Shopify::Field::Identifier(),
"name" => new WWW::Shopify::Field::String(),
"owner_email" => new WWW::Shopify::Field::String(),
"owner_id" => new WWW::Shopify::Field::BigInt(),
"owner_name" => new WWW::Shopify::Field::String(),
"user_count" => new WWW::Shopify::Field::Int(),
"shop_count" => new WWW::Shopify::Field::Int(),
"progress" => new WWW::Shopify::Field::Int(),
"authorships" => new WWW::Shopify::Field::Relation::Many("WWW::Shopify::Model::Locale::Authorship")
}; }
sub needs_login { return 1; }
eval(__PACKAGE__->generate_accessors); die $@ if $@;
1;
| 27.117647 | 100 | 0.664859 |
ed34285fee6f0e02e8ace2546c3c7b4f5ebe8c77 | 9,901 | pm | Perl | auto-lib/Paws/Lightsail/GetInstanceMetricData.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/Lightsail/GetInstanceMetricData.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/Lightsail/GetInstanceMetricData.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::Lightsail::GetInstanceMetricData;
use Moose;
has EndTime => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'endTime' , required => 1);
has InstanceName => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'instanceName' , required => 1);
has MetricName => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'metricName' , required => 1);
has Period => (is => 'ro', isa => 'Int', traits => ['NameInRequest'], request_name => 'period' , required => 1);
has StartTime => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'startTime' , required => 1);
has Statistics => (is => 'ro', isa => 'ArrayRef[Str|Undef]', traits => ['NameInRequest'], request_name => 'statistics' , required => 1);
has Unit => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'unit' , required => 1);
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'GetInstanceMetricData');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::Lightsail::GetInstanceMetricDataResult');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Lightsail::GetInstanceMetricData - Arguments for method GetInstanceMetricData on L<Paws::Lightsail>
=head1 DESCRIPTION
This class represents the parameters used for calling the method GetInstanceMetricData on the
L<Amazon Lightsail|Paws::Lightsail> service. Use the attributes of this class
as arguments to method GetInstanceMetricData.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to GetInstanceMetricData.
=head1 SYNOPSIS
my $lightsail = Paws->service('Lightsail');
my $GetInstanceMetricDataResult = $lightsail->GetInstanceMetricData(
EndTime => '1970-01-01T01:00:00',
InstanceName => 'MyResourceName',
MetricName => 'CPUUtilization',
Period => 1,
StartTime => '1970-01-01T01:00:00',
Statistics => [
'Minimum', ... # values: Minimum, Maximum, Sum, Average, SampleCount
],
Unit => 'Seconds',
);
# Results:
my $MetricData = $GetInstanceMetricDataResult->MetricData;
my $MetricName = $GetInstanceMetricDataResult->MetricName;
# Returns a L<Paws::Lightsail::GetInstanceMetricDataResult> object.
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/lightsail/GetInstanceMetricData>
=head1 ATTRIBUTES
=head2 B<REQUIRED> EndTime => Str
The end time of the time period.
=head2 B<REQUIRED> InstanceName => Str
The name of the instance for which you want to get metrics data.
=head2 B<REQUIRED> MetricName => Str
The metric for which you want to return information.
Valid instance metric names are listed below, along with the most
useful C<statistics> to include in your request, and the published
C<unit> value.
=over
=item *
B<C<BurstCapacityPercentage> > - The percentage of CPU performance
available for your instance to burst above its baseline. Your instance
continuously accrues and consumes burst capacity. Burst capacity stops
accruing when your instance's C<BurstCapacityPercentage> reaches 100%.
For more information, see Viewing instance burst capacity in Amazon
Lightsail
(https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-viewing-instance-burst-capacity).
C<Statistics>: The most useful statistics are C<Maximum> and
C<Average>.
C<Unit>: The published unit is C<Percent>.
=item *
B<C<BurstCapacityTime> > - The available amount of time for your
instance to burst at 100% CPU utilization. Your instance continuously
accrues and consumes burst capacity. Burst capacity time stops accruing
when your instance's C<BurstCapacityPercentage> metric reaches 100%.
Burst capacity time is consumed at the full rate only when your
instance operates at 100% CPU utilization. For example, if your
instance operates at 50% CPU utilization in the burstable zone for a
5-minute period, then it consumes CPU burst capacity minutes at a 50%
rate in that period. Your instance consumed 2 minutes and 30 seconds of
CPU burst capacity minutes in the 5-minute period. For more
information, see Viewing instance burst capacity in Amazon Lightsail
(https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-viewing-instance-burst-capacity).
C<Statistics>: The most useful statistics are C<Maximum> and
C<Average>.
C<Unit>: The published unit is C<Seconds>.
=item *
B<C<CPUUtilization> > - The percentage of allocated compute units that
are currently in use on the instance. This metric identifies the
processing power to run the applications on the instance. Tools in your
operating system can show a lower percentage than Lightsail when the
instance is not allocated a full processor core.
C<Statistics>: The most useful statistics are C<Maximum> and
C<Average>.
C<Unit>: The published unit is C<Percent>.
=item *
B<C<NetworkIn> > - The number of bytes received on all network
interfaces by the instance. This metric identifies the volume of
incoming network traffic to the instance. The number reported is the
number of bytes received during the period. Because this metric is
reported in 5-minute intervals, divide the reported number by 300 to
find Bytes/second.
C<Statistics>: The most useful statistic is C<Sum>.
C<Unit>: The published unit is C<Bytes>.
=item *
B<C<NetworkOut> > - The number of bytes sent out on all network
interfaces by the instance. This metric identifies the volume of
outgoing network traffic from the instance. The number reported is the
number of bytes sent during the period. Because this metric is reported
in 5-minute intervals, divide the reported number by 300 to find
Bytes/second.
C<Statistics>: The most useful statistic is C<Sum>.
C<Unit>: The published unit is C<Bytes>.
=item *
B<C<StatusCheckFailed> > - Reports whether the instance passed or
failed both the instance status check and the system status check. This
metric can be either 0 (passed) or 1 (failed). This metric data is
available in 1-minute (60 seconds) granularity.
C<Statistics>: The most useful statistic is C<Sum>.
C<Unit>: The published unit is C<Count>.
=item *
B<C<StatusCheckFailed_Instance> > - Reports whether the instance passed
or failed the instance status check. This metric can be either 0
(passed) or 1 (failed). This metric data is available in 1-minute (60
seconds) granularity.
C<Statistics>: The most useful statistic is C<Sum>.
C<Unit>: The published unit is C<Count>.
=item *
B<C<StatusCheckFailed_System> > - Reports whether the instance passed
or failed the system status check. This metric can be either 0 (passed)
or 1 (failed). This metric data is available in 1-minute (60 seconds)
granularity.
C<Statistics>: The most useful statistic is C<Sum>.
C<Unit>: The published unit is C<Count>.
=back
Valid values are: C<"CPUUtilization">, C<"NetworkIn">, C<"NetworkOut">, C<"StatusCheckFailed">, C<"StatusCheckFailed_Instance">, C<"StatusCheckFailed_System">, C<"BurstCapacityTime">, C<"BurstCapacityPercentage">
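As a small illustration of the C<NetworkIn>/C<NetworkOut> note above (a
5-minute C<Sum> divided by 300 gives a per-second rate); this sketch assumes
the returned datapoint objects expose a C<Sum> accessor:

    for my $point (@{ $GetInstanceMetricDataResult->MetricData }) {
        my $bytes_per_second = $point->Sum / 300;  # 300 s = one 5-minute period
    }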
=head2 B<REQUIRED> Period => Int
The granularity, in seconds, of the returned data points.
The C<StatusCheckFailed>, C<StatusCheckFailed_Instance>, and
C<StatusCheckFailed_System> instance metric data is available in
1-minute (60 seconds) granularity. All other instance metric data is
available in 5-minute (300 seconds) granularity.
=head2 B<REQUIRED> StartTime => Str
The start time of the time period.
=head2 B<REQUIRED> Statistics => ArrayRef[Str|Undef]
The statistic for the metric.
The following statistics are available:
=over
=item *
C<Minimum> - The lowest value observed during the specified period. Use
this value to determine low volumes of activity for your application.
=item *
C<Maximum> - The highest value observed during the specified period.
Use this value to determine high volumes of activity for your
application.
=item *
C<Sum> - All values submitted for the matching metric added together.
You can use this statistic to determine the total volume of a metric.
=item *
C<Average> - The value of Sum / SampleCount during the specified
period. By comparing this statistic with the Minimum and Maximum
values, you can determine the full scope of a metric and how close the
average use is to the Minimum and Maximum values. This comparison helps
you to know when to increase or decrease your resources.
=item *
C<SampleCount> - The count, or number, of data points used for the
statistical calculation.
=back
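As a worked example of the C<Average> definition above: a period whose C<Sum>
is 300 with a C<SampleCount> of 5 has an C<Average> of 300 / 5 = 60.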
=head2 B<REQUIRED> Unit => Str
The unit for the metric data request. Valid units depend on the metric
data being requested. For the valid units to specify with each
available metric, see the C<metricName> parameter.
Valid values are: C<"Seconds">, C<"Microseconds">, C<"Milliseconds">, C<"Bytes">, C<"Kilobytes">, C<"Megabytes">, C<"Gigabytes">, C<"Terabytes">, C<"Bits">, C<"Kilobits">, C<"Megabits">, C<"Gigabits">, C<"Terabits">, C<"Percent">, C<"Count">, C<"Bytes/Second">, C<"Kilobytes/Second">, C<"Megabytes/Second">, C<"Gigabytes/Second">, C<"Terabytes/Second">, C<"Bits/Second">, C<"Kilobits/Second">, C<"Megabits/Second">, C<"Gigabits/Second">, C<"Terabits/Second">, C<"Count/Second">, C<"None">
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method GetInstanceMetricData in L<Paws::Lightsail>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 35.873188 | 488 | 0.740935 |
ed0407cd6d878bd26dbc28db2ffca3fd18999e19
| 34,294 |
pl
|
Perl
|
pxpjcid.pl
|
munepi/PXacid
|
2b6bbd6dd2e73020582f65c9a28419c596f1af71
|
[
"MIT"
] | null | null | null |
pxpjcid.pl
|
munepi/PXacid
|
2b6bbd6dd2e73020582f65c9a28419c596f1af71
|
[
"MIT"
] | null | null | null |
pxpjcid.pl
|
munepi/PXacid
|
2b6bbd6dd2e73020582f65c9a28419c596f1af71
|
[
"MIT"
] | null | null | null |
#!/usr/bin/perl
# pxpjcid.pl
#
use strict;
our $xetex = "xetex -interaction=batchmode";
#our $pltotf = "pltotf"; # plain pTeX variant (superseded below)
our $pltotf = "uppltotf"; # upTeX variant, needed for the UXXXX-style CHARSINTYPE entries
our $opl2ofm = "opl2ofm";
our $ovp2ovf = "ovp2ovf";
#
our $prog_name = "pxpjcid";
our $version = "0.3.1";
our $mod_date = "2017/04/21";
our $temp_base = "__$prog_name$$";
our $gid_offset = 0;
##-----------------------------------------------------------
## Mapping between TeX slots and Unicode points
use Encode qw( decode );
our $tex2ucs_table = {
'JY1' => sub {
my ($s) = $_[0];
my $u = ord(decode('jis0208-raw', pack('n', $s)));
return ($u == 0xFFFD) ? undef : $u;
},
'JY2' => sub {
return $_[0];
}
};
# tex2ucs($enc, $tc)
sub tex2ucs {
my ($enc, $tc) = @_;
return $tex2ucs_table->{$enc}($tc);
}
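# e.g. tex2ucs('JY2', 0x3042) == 0x3042 (JY2 slots are Unicode code points),
# while tex2ucs('JY1', $s) decodes $s as JIS X 0208, yielding undef when unmappable.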
##----------------------------------------------------------
## Mapping from Unicode to AJ1 CID
our $ucs2aj_table;
{
local ($/, $_); my (%t);
$_ = <DATA>; %t = eval($_);
for (0x3400 .. 0x4DBF) { $t{$_} = 0; }
for (0x4E00 .. 0x9FFF) { $t{$_} = 0; }
for (0xF900 .. 0xFAFF) { $t{$_} = 0; }
$ucs2aj_table = \%t;
}
## ucs2aj($uc)
sub ucs2aj {
my $t = $ucs2aj_table->{$_[0]};
return ($t) ? ($t + $gid_offset) : $t;
}
our $target_ucs;
our $target_aj;
sub gen_target_list {
$target_ucs = [ sort { $a <=> $b } (keys %$ucs2aj_table) ];
my %chk = reverse %$ucs2aj_table;
$target_aj = [ map { $_ + $gid_offset }
(sort { $a <=> $b } (keys %chk)) ];
}
our $enc_list = [ keys %$tex2ucs_table ];
##----------------------------------------------------------
## Retrieval of glyph metric by means of XeTeX
# get_metric($font, $index, $chars)
sub get_metric {
my ($font, $index, $chars) = @_;
write_whole("$temp_base.tex", query_xetex($font, $index, $chars), 1);
if (-s "$prog_name-save.log") {
my $t = read_whole("$prog_name-save.log", 1);
write_whole("$temp_base.log", $t, 1);
} else {
system "$xetex $temp_base";
}
(-s "$temp_base.log")
or error("XeTeX execution failed");
my $lin; my $par = {};
open(my $hi, '<', "$temp_base.log")
or error("cannot read file", "$temp_base.log");
while ($lin = <$hi>) {
if ($lin =~ m/^! /) {
error("error occurred in XeTeX process");
} elsif ($lin =~ m/^!OUT!(.*)$/) {
nest_assign($par, $1);
}
}
close($hi);
#
derive_param($par);
return $par;
}
# nest_assign($base, $text)
sub nest_assign {
my ($base, $text) = @_;
my ($pname, $value) = ($text =~ m/^(.*)=(.*)$/) or die;
my @plist = split(m/:/, $pname);
if ($value =~ m/^(-?\d+\.\d+)pt$/) {
$value = $1 / 10;
}
nest_assign_sub($base, \@plist, $value);
}
sub nest_assign_sub {
my ($hash, $plist, $value) = @_;
my ($name, @plist1) = @$plist;
if (!@plist1) {
$hash->{$name} = $value;
} else {
(exists $hash->{$name}) or $hash->{$name} = {};
nest_assign_sub($hash->{$name}, \@plist1, $value);
}
}
# query_xetex($font, $index, $chars)
sub query_xetex {
my ($font, $index, $chars) = @_; my ($t);
(defined $index) and $font = "$font:$index";
local $_ = <<'END';
\font\fontU="[?FONT?]"
\newcount\cntC
\newcount\cntM
\newbox\boxA
\def\writeLog#1{\immediate\write-1{#1}}
\def\outData#1{\writeLog{!OUT!#1}}
\def\doForEachAj{?DO_AJ?}
\def\getMetricAj#1#2{%
#1\def\pname{#2}\cntM=\XeTeXcountglyphs\font
\let\do\doGetMetricAj \doForEachAj}
\def\doGetMetricAj#1{%
\cntC=#1
\ifnum\cntC<\cntM
\setbox\boxA=\hbox{\XeTeXglyph\cntC}%
\outData{\pname:#1:wd=\the\wd\boxA}%
\outData{\pname:#1:ht=\the\ht\boxA}%
\outData{\pname:#1:dp=\the\dp\boxA}%
\fi}
\getMetricAj\fontU{aj}
\bye
END
s/%%.*$/%/gm; s/\?FONT\?/$font/g;
$t = do_list($target_aj); s/\?DO_AJ\?/$t/g;
return $_;
}
# do_list($vals)
sub do_list {
my ($vals) = @_;
return join("%\n", map { "\\do{$_}" } (@$vals));
}
# derive_param($par)
sub derive_param {
my ($par) = @_; my ($cc);
# xheight
(defined($cc = ucs2aj(ord('x'))) &&
defined($par->{xheight} = $par->{aj}{$cc}{ht}))
or $par->{xheight} = 0.5;
# capheight
(defined($cc = ucs2aj(ord('I'))) &&
defined($par->{capheight} = $par->{aj}{$cc}{ht}))
or $par->{capheight} = 0.75;
# ascheight
(defined($cc = ucs2aj(ord('h'))) &&
defined($par->{ascheight} = $par->{aj}{$cc}{ht}))
or $par->{ascheight} = 0.75;
# space
(defined($cc = ucs2aj(ord(' '))) &&
defined($par->{space} = $par->{aj}{$cc}{wd}))
or $par->{space} = 0.5;
}
##----------------------------------------------------------
## Generating TeX metric files
use constant { STDHT => 0.88, STDDP => 0.12 };
# source_opl($par, $fam)
sub source_opl {
my ($par, $fam) = @_;
my @cnks = (<<"END");
(OFMLEVEL H 0)
(FAMILY $fam)
(CODINGSCHEME )
(FONTDIMEN
(XHEIGHT R 1.0)
(QUAD R 1.0)
)
END
my $paraj = $par->{aj};
foreach my $cc (@$target_aj) {
my $t = $paraj->{$cc};
(defined $t) or next;
my ($wd, $ht, $dp) = ($t->{wd}, $t->{ht}, $t->{dp});
push(@cnks, <<"END");
(CHARACTER H @{[FH($cc)]}
(CHARWD R @{[FR($wd)]})
(CHARHT R @{[FR($ht)]})
(CHARDP R @{[FR($dp)]})
)
END
}
return join('', @cnks);
}
# source_virtual()
sub source_virtual {
my ($par, $fam, $ser, $enc, $tfmfam, $wdp) = @_;
my (%rwd, @rwd, @rct);
if ($wdp) { info("width precision set to $wdp"); }
# OVP CHARACTER
my (@pccnks, @ptcnks, @vccnks, @valid);
my ($stc, $etc) = tc_range($enc);
foreach my $tc ($stc .. $etc) {
my $uc = tex2ucs($enc, $tc) or next;
my $dat = resolve_map($uc, $par, $tc, $wdp);
#$valid[$tc] = 1; $ctchar += 1;
my ($map, $wd, $ht, $dp, $rdwd) = @$dat;
if (@$map) {
$map = join("\n", map { " $_" } (@$map));
$map = " (MAP\n$map\n )";
} else { $map = " (MAP)"; }
push(@vccnks, <<"END");
(CHARACTER H @{[FH($tc)]}
(CHARWD R @{[FR($rdwd)]})
$map
)
END
# PL
if ($rdwd != 1.0) {
push(@{$rwd{$rdwd}}, $tc);
}
}
#
{
my @w = sort { $a <=> $b } (keys %rwd);
my $nct = scalar(@w);
if ($nct > 250) {
info("Char type limit exceeded ($nct > 250)");
return;
}
foreach (0 .. $#w) {
$rwd[$_ + 1] = [$w[$_], $rwd{$w[$_]}];
}
$rwd[0] = [1.0];
}
foreach my $ct (0 .. $#rwd) {
(defined $rwd[$ct]) or die;
my ($wd, $cs) = @{$rwd[$ct]};
push(@ptcnks, <<"END");
(TYPE D $ct
(CHARWD R @{[FR($wd)]})
(CHARHT R @{[FR(STDHT)]})
(CHARDP R @{[FR(STDDP)]})
)
END
if (defined $cs) {
push(@pccnks, "(CHARSINTYPE D $ct\n");
foreach my $uc (@$cs) {
push(@pccnks, sprintf("U%04X\n", $uc));
}
push(@pccnks, " )\n");
}
}
# PL LIGTABLE
my @lkcnks = ();
push(@lkcnks, "(LABEL D 5)", "(KRN D 5 R 0.0)", "(STOP)");
@lkcnks = map { " $_\n" } (@lkcnks);
# entire OVP
my $rjfmname = fontname($tfmfam, $ser, $enc, 1);
my $ofmname = fontname($tfmfam, $ser, 'J40');
my $ovp = join('', <<"END", @vccnks);
(OFMLEVEL H 0)
(VTITLE $fam)
(FAMILY $fam)
(FONTDIMEN
(QUAD R 1.0)
(XHEIGHT R 1.0)
)
(MAPFONT D 0
(FONTNAME $rjfmname)
)
(MAPFONT D 1
(FONTNAME $ofmname)
)
END
# entire PL
my $space = $par->{space};
my $xheight = $par->{xheight};
my $slant = 0;
my $pl = join('', <<"END1", @lkcnks, <<"END2", @pccnks, @ptcnks);
(FAMILY $fam)
(CODINGSCHEME )
(FONTDIMEN
(SLANT R 0.0)
(SPACE R 0.0)
(STRETCH R 0.06)
(SHRINK R 0.0)
(XHEIGHT R 1.0)
(QUAD R 1.0)
(EXTRASPACE R 0.12)
(EXTRASTRETCH R 0.06)
(EXTRASHRINK R 0.03)
)
(GLUEKERN
END1
)
END2
# done
return ($pl, $ovp);
}
sub resolve_map {
my ($uc, $par, $tc, $wdp) = @_;
my $paraj = $par->{aj};
{
my $cc = ucs2aj($uc) or last;
my $t = $paraj->{$cc} or last;
my $wd = $t->{wd}; my $rdwd = $wd;
if ($wdp) {
$rdwd = int($wd / $wdp + 0.5) * $wdp;
}
return [ [ "(SELECTFONT D 1)", "(SETCHAR H @{[FH($cc)]})" ],
$wd, $t->{ht}, $t->{dp}, $rdwd ];
}
return [ [ "(SETCHAR H @{[FH($tc)]})" ],
1.0, STDHT, STDDP, 1.0 ];
}
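# e.g. a code point with an AJ1 CID is set from MAPFONT 1 (the OFM) at its
# real metrics, while anything unmapped falls through to MAPFONT 0 (the raw
# JFM) at full-width (1.0 / STDHT / STDDP) metrics.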
sub standard_pl {
return <<'END';
(FAMILY )
(CODINGSCHEME )
(FONTDIMEN
(SLANT R 0.0)
(SPACE R 0.0)
(STRETCH R 0.0)
(SHRINK R 0.0)
(XHEIGHT R 1.0)
(QUAD R 1.0)
(EXTRASPACE R 0.0)
(EXTRASTRETCH R 0.0)
(EXTRASHRINK R 0.0)
)
(TYPE O 0
(CHARWD R 1.0)
(CHARHT R 0.9)
(CHARDP R 0.1)
)
END
}
# FR($value)
sub FR {
local $_ = sprintf("%.7f", $_[0]); s/0+$/0/; return $_;
}
# FH($value)
sub FH {
return sprintf("%X", $_[0]);
}
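# e.g. FR(1.0) yields "1.0" (a trailing run of zeros collapses to one "0")
# and FH(255) yields "FF".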
# tc_range($enc)
our $range = {
'JY1' => [ 0x2121, 0x7E7E ],
'JY2' => [ 0x0, 0xFFFF ],
};
sub tc_range { return @{$range->{$_[0]}}; }
# standard_cmap($enc)
our $std_cmap = {
'JY1' => "H",
'JY2' => "UniJIS-UTF16-H",
};
sub standard_cmap { return $std_cmap->{$_[0]}; }
# use_berry($sw)
sub use_berry { } # no-pp
# NFSS series -> Berry
our $ser_kb = {
ul => 'a', # UltraLight
el => 'j', # ExtraLight
l => 'l', # Light
m => 'r', # Regular
mb => 'm', # Medium
db => 'd', # DemiBold
sb => 's', # SemiBold
b => 'b', # Bold
bx => 'b', # Bold
eb => 'x', # Extra
ub => 'u' # Ultra
};
# counterpart
our $enc_tate = {
JY1 => 'JT1', JY2 => 'JT2'
};
# fontname($tfmfam, $ser, $enc, $raw)
sub fontname {
my ($tfmfam, $ser, $enc, $raw) = @_;
$raw = ($raw) ? "r-" : "";
$ser = $ser_kb->{$ser};
$enc = lc($enc);
return "$raw$tfmfam-$ser-$enc";
}
##----------------------------------------------------------
## LaTeX font definition files
# set_scale($scale)
our $scale = 0.924715;
sub set_scale {
$scale = $_[0] if (defined $_[0]);
}
# source_fd($fam, $ser, $enc, $tfmfam, $orgsrc)
sub source_fd {
my ($fam, $ser, $enc, $tfmfam, $orgsrc) = @_;
my (%spec, @pos, $ser1, $text);
my $tenc = $enc_tate->{$enc} or die;
my $rx = qr/^\\DeclareFontShape\{$enc\}\{$fam\}
\{(\w+)\}\{n\}\{<->(?:s\*\[[\d\.]+\])(.*?)\}/x;
# parse original
foreach my $lin (split(m/\n/, $orgsrc)) {
if (($ser1, $text) = $lin =~ $rx) {
push(@pos, $ser1);
$spec{$ser1} = ($text =~ m/^ssub\*/) ? undef : $text;
}
}
if (!@pos) {
foreach $ser1 ('m', 'b', 'bx') {
push(@pos, $ser1); $spec{$ser1} = undef;
}
}
if (!exists $spec{$ser}) { push(@pos, $ser); }
$spec{$ser} = fontname($tfmfam, $ser, $enc);
#
my $bfser;
foreach my $ent (@pos) {
(defined $spec{$ent}) or next;
if ($ent =~ m|^bx?$|) { $bfser = 1; }
}
# generate new
my (@cnks, @cnkst, $textt); # reuse $text declared above
foreach $ser1 (@pos) {
if (defined $spec{$ser1}) {
$text = "s*[$scale]" . $spec{$ser1};
} else {
my $ser2 = ($ser1 eq 'm') ? $ser :
($bfser && $ser1 eq 'b') ? 'bx' :
($bfser && $ser1 eq 'bx') ? 'b' : 'm';
$text = "ssub*$fam/$ser2/n";
}
my $text2 = "ssub*$fam/$ser1/n";
my $text3 = "ssub*mc/m/n";
push(@cnks,
"\\DeclareFontShape{$enc}{$fam}{$ser1}{n}{<->$text}{}",
"\\DeclareFontShape{$enc}{$fam}{$ser1}{it}{<->$text2}{}",
"\\DeclareFontShape{$enc}{$fam}{$ser1}{sl}{<->$text2}{}");
push(@cnkst,
"\\DeclareFontShape{$tenc}{$fam}{$ser1}{n}{<->$text3}{}",
"\\DeclareFontShape{$tenc}{$fam}{$ser1}{it}{<->$text3}{}",
"\\DeclareFontShape{$tenc}{$fam}{$ser1}{sl}{<->$text3}{}");
}
$text = join("\n", @cnks);
$textt = join("\n", @cnkst);
#
my $fdname = lc("$enc$fam");
my $tfdname = lc("$tenc$fam");
return (<<"END1", <<"END2");
% $fdname.fd
\\DeclareFontFamily{$enc}{$fam}{}
$text
%% EOF
END1
% $tfdname.fd
\\DeclareFontFamily{$tenc}{$fam}{}
$textt
%% EOF
END2
}
##----------------------------------------------------------
## dvipdfmx map files
# source_map($fam, $ser, $tfmfam, $font, $index, $orgsrc, $encset)
sub source_map {
my ($fam, $ser, $tfmfam, $font, $index, $orgsrc, $encset) = @_;
(defined $index) and $font = ":$index:$font";
my @spec;
foreach my $lin (split(m/\n/, $orgsrc)) {
if ($lin !~ m/^\s*(\#|$)/) {
push(@spec, $lin);
}
}
my $ofmname = fontname($tfmfam, $ser, 'J40');
push(@spec, "$ofmname Identity-H $font");
foreach my $enc (@$encset) {
my $rjfmname = fontname($tfmfam, $ser, $enc, 1);
my $cmap = standard_cmap($enc);
push(@spec, "$rjfmname $cmap $font");
}
my $text = join("\n", @spec);
return <<"END";
# pdfm-$fam.map
$text
# EOF
END
}
##----------------------------------------------------------
## LaTeX style files
# source_style($font)
sub source_style {
my ($fam) = @_; local ($_);
$_ = <<'END';
% pxpjcid-?FAM?.sty
\NeedsTeXFormat{pLaTeX2e}
\ProvidesPackage{pxpjcid-?FAM?}
\DeclareRobustCommand*{\?FAM?family}{%
\not@math@alphabet\?FAM?family\relax
\fontfamily{?FAM?}\selectfont}
\DeclareTextFontCommand{\text?FAM?}{\?FAM?family}
% EOF
END
s/\?FAM\?/$fam/g;
return $_;
}
my $testtext; # My hovercraft is full of eels.
{
my $src = <<'EOT';
E7A781E381AEE3839BE38390E383BCE382AFE383
A9E38395E38388E381AFE9B0BBE381A7E38184E3
81A3E381B1E38184E381A7E38199E38082
EOT
$src =~ s/\s//g; $testtext = pack('H*', $src);
}
# source_test($fam, $ser)
sub source_test {
my ($fam, $ser) = @_; local ($_);
$_ = <<'END';
\documentclass[a4paper]{jsarticle}
\AtBeginDvi{\special{pdf:mapfile pdfm-?FAM?}}
\usepackage{pxpjcid-?FAM?}
\begin{document}
\?FAM?family\kanjiseries{?SER?}\selectfont
?TEXT?
\end{document}
END
s/\?FAM\?/$fam/g; s/\?SER\?/$ser/g;
s/\?TEXT\?/$testtext/g;
return "\xEF\xBB\xBF" . $_; # BOM added
}
##----------------------------------------------------------
## Main
# append_mode($value)
our $append_mode;
sub append_mode { $append_mode = $_[0]; }
# save_source($value)
our $save_source;
sub save_source { $save_source = $_[0]; }
# generate($font, $fam, $enclist)
sub generate {
my ($font, $fam, $ser, $tfmfam, $index) = @_;
#
my $par = get_metric($font, $index, $target_ucs);
#
my $ofmname = fontname($tfmfam, $ser, 'J40');
info("Process for $ofmname...");
write_whole("$ofmname.opl", source_opl($par, $fam), 1);
system("$opl2ofm $ofmname.opl $ofmname.ofm");
(-s "$ofmname.ofm")
or error("failed in converting OPL -> OFM", "$ofmname.ofm");
if (!$save_source) { unlink("$ofmname.opl"); }
#
my @encset = ('JY1', 'JY2');
foreach my $enc (@encset) {
#
my $vfname = fontname($tfmfam, $ser, $enc);
info("Process for $vfname...");
my ($pl, $ovp, $wdpd);
while (!defined $pl) {
($pl, $ovp) = source_virtual($par, $fam, $ser, $enc, $tfmfam,
(defined $wdpd) ? (1 / $wdpd) : undef);
$wdpd = (defined $wdpd) ? ($wdpd / 2) : 8192;
}
write_whole("$vfname.pl", $pl, 1);
system("$pltotf $vfname.pl $vfname.tfm");
(-s "$vfname.tfm")
or error("failed in converting PL -> TFM", "$vfname.tfm");
write_whole("$vfname.ovp", $ovp, 1);
#
system("$ovp2ovf $vfname.ovp $vfname.ovf $temp_base.ofm");
unlink("$vfname.vf"); rename("$vfname.ovf", "$vfname.vf");
(-s "$vfname.vf")
or error("failed in converting OPL -> VF", "$vfname.vf");
if (!$save_source) { unlink("$vfname.pl", "$vfname.ovp"); }
# (raw)
my $rjfmname = fontname($tfmfam, $ser, $enc, 1);
write_whole("$rjfmname.pl", standard_pl(), 1);
system("$pltotf $rjfmname.pl $rjfmname.tfm");
unlink("$rjfmname.pl");
#
my $orgfd; my $tenc = $enc_tate->{$enc} or die;
my $fdname = lc("$enc$fam");
my $tfdname = lc("$tenc$fam");
if ($append_mode && -f "$fdname.fd") {
$orgfd = read_whole("$fdname.fd");
}
my ($fd, $tfd) = source_fd($fam, $ser, $enc, $tfmfam, $orgfd);
write_whole("$fdname.fd", $fd);
write_whole("$tfdname.fd", $tfd);
}
#
my $mapname = "pdfm-$fam"; my $orgmap;
if ($append_mode && -f "$mapname.map") {
$orgmap = read_whole("$mapname.map");
}
my $map = source_map($fam, $ser, $tfmfam, $font, $index, $orgmap, \@encset);
write_whole("$mapname.map", $map);
#
my $styname = "pxpjcid-$fam";
my $sty = source_style($fam);
if (!$append_mode) { write_whole("$styname.sty", $sty); }
my $texname = "pxpjcid-test-$fam-$ser";
my $tex = source_test($fam, $ser);
write_whole("$texname.tex", $tex);
}
#-----------------------------------------------------------
# main()
sub main {
my $prop = read_option();
(defined $prop->{gid_offset}) and $gid_offset = $prop->{gid_offset};
append_mode($prop->{append});
use_berry($prop->{use_berry});
save_source($prop->{save_source});
save_log($prop->{save_log});
set_scale($prop->{scale});
gen_target_list();
generate($prop->{font}, $prop->{family}, $prop->{series},
$prop->{tfm_family}, $prop->{index});
}
# read_option()
sub read_option {
my $prop = {};
if (!@ARGV) { show_usage(); exit; }
while ($ARGV[0] =~ m/^-/) {
my $opt = shift(@ARGV); my $arg;
if ($opt =~ m/^--?h(elp)?/) {
show_usage(); exit;
} elsif ($opt eq '-a' || $opt eq '--append') {
$prop->{append} = 1;
} elsif ($opt eq '-b' || $opt eq '--use-berry') {
$prop->{use_berry} = 1;
} elsif ($opt eq '-s' || $opt eq '--save-source') {
$prop->{save_source} = 1;
} elsif ($opt eq '--save-log') {
$prop->{save_log} = 1;
} elsif (($arg) = $opt =~ m/^-(?:t|-tfm-family)(?:=(.*))?$/) {
(defined $arg) or $arg = shift(@ARGV);
($arg =~ m/^[a-z0-9]+$/) or error("bad family name", $arg);
$prop->{tfm_family} = $arg;
} elsif (($arg) = $opt =~ m/^--scale(?:=(.*))?$/) {
(defined $arg) or $arg = shift(@ARGV);
($arg =~ m/^[.0-9]+$/ && 0 <= $arg && $arg < 10)
or error("bad scale value", $arg);
$prop->{scale} = $arg;
} elsif (($arg) = $opt =~ m/^--gid-offset(?:=(.*))?$/) {
(defined $arg) or $arg = shift(@ARGV);
($arg =~ m/^[0-9]+$/) or error("bad gid-offset value", $arg);
$prop->{gid_offset} = $arg;
} elsif (($arg) = $opt =~ m/^-(?:i|-index)(?:=(.*))?$/) {
(defined $arg) or $arg = shift(@ARGV);
($arg =~ m/^[0-9]+$/) or error("bad TTC index value", $arg);
$prop->{index} = $arg;
} else {
error("invalid option", $opt);
}
}
($#ARGV == 1) or error("wrong number of command arguments");
my ($fam, $ser) = ($ARGV[0] =~ m|^(.*?)/(.*)$|) ?
($1, $2) : ($ARGV[0], 'm');
($fam =~ m/^[a-z]+$/) or error("bad family name", $fam);
($ser =~ m/^[a-z]+$/) or error("bad series name", $ser);
(exists $ser_kb->{$ser}) or error("unknown series name", $ser);
$prop->{family} = $fam; $prop->{series} = $ser;
$prop->{font} = $ARGV[1];
(defined $prop->{tfm_family})
or $prop->{tfm_family} = $prop->{family};
return $prop;
}
# show_usage()
sub show_usage {
print <<"END";
This is $prog_name v$version <$mod_date> by 'ZR'.
Usage: $prog_name [<option>...] <family>[/<series>] <font_file>
<family> LaTeX family name to designate the font
<series> LaTeX series name to designate the font
<font_file> file name of the target font (NOT font name); the file
must be put in the location Kpathsea can find
Options are:
  -a / --append            append mode (for .fd & .map)
  -t / --tfm-family=<name> font family name used in tfm names
  -s / --save-source       save PL/OPL/OVP files
  -i / --index=<val>       index of the target font in a TTC file
  --scale=<val>            scale value
  --gid-offset=<val>       offset between CID and GID
  --save-log               save the XeTeX log file
END
}
# info($msg, ...)
sub info {
print STDERR (join(": ", $prog_name, @_), "\n");
}
# error($msg, ...)
sub error {
print STDERR (join(": ", $prog_name, @_), "\n");
exit(-1);
}
# max($x, $y)
sub max {
return ($_[0] > $_[1]) ? $_[0] : $_[1];
}
# save_log($value)
our $save_log;
sub save_log { $save_log = $_[0]; }
# write_whole($name, $dat, $bin)
sub write_whole {
my ($name, $dat, $bin) = @_;
open(my $ho, '>', $name)
or error("cannot create file", $name);
if ($bin) { binmode($ho); }
print $ho ($dat);
close($ho);
}
# read_whole($name, $bin)
sub read_whole {
my ($name, $bin) = @_; local ($/);
open(my $hi, '<', $name)
or error("cannot open file for input", $name);
if ($bin) { binmode($hi); }
my $dat = <$hi>;
close($hi);
return $dat;
}
END {
if ($save_log) {
unlink("$prog_name-save.log");
rename("$temp_base.log", "$prog_name-save.log");
}
unlink("$temp_base.tex", "$temp_base.log", "$temp_base.ofm");
}
#-----------------------------------------------------------
main();
__DATA__
(
0,1,
1,1,
2,1,
3,1,
4,1,
5,1,
6,1,
7,1,
8,1,
9,1,
10,1,
11,1,
12,1,
13,1,
14,1,
15,1,
16,1,
17,1,
18,1,
19,1,
20,1,
21,1,
22,1,
23,1,
24,1,
25,1,
26,1,
27,1,
28,1,
29,1,
30,1,
31,1,
32,1,
33,2,
34,3,
35,4,
36,5,
37,6,
38,7,
39,8,
40,9,
41,10,
42,11,
43,12,
44,13,
45,14,
46,15,
47,16,
48,17,
49,18,
50,19,
51,20,
52,21,
53,22,
54,23,
55,24,
56,25,
57,26,
58,27,
59,28,
60,29,
61,30,
62,31,
63,32,
64,33,
65,34,
66,35,
67,36,
68,37,
69,38,
70,39,
71,40,
72,41,
73,42,
74,43,
75,44,
76,45,
77,46,
78,47,
79,48,
80,49,
81,50,
82,51,
83,52,
84,53,
85,54,
86,55,
87,56,
88,57,
89,58,
90,59,
91,60,
92,97,
93,62,
94,63,
95,64,
96,65,
97,66,
98,67,
99,68,
100,69,
101,70,
102,71,
103,72,
104,73,
105,74,
106,75,
107,76,
108,77,
109,78,
110,79,
111,80,
112,81,
113,82,
114,83,
115,84,
116,85,
117,86,
118,87,
119,88,
120,89,
121,90,
122,91,
123,92,
124,99,
125,94,
126,100,
160,1,
161,101,
162,102,
163,103,
164,107,
165,61,
166,93,
167,106,
168,132,
169,152,
170,140,
171,109,
172,153,
174,154,
175,129,
176,155,
177,156,
178,157,
179,158,
180,127,
181,159,
182,118,
183,117,
184,134,
185,160,
186,144,
187,123,
188,161,
189,162,
190,163,
191,126,
192,164,
193,165,
194,166,
195,167,
196,168,
197,169,
198,139,
199,170,
200,171,
201,172,
202,173,
203,174,
204,175,
205,176,
206,177,
207,178,
208,179,
209,180,
210,181,
211,182,
212,183,
213,184,
214,185,
215,186,
216,142,
217,187,
218,188,
219,189,
220,190,
221,191,
222,192,
223,150,
224,193,
225,194,
226,195,
227,196,
228,197,
229,198,
230,145,
231,199,
232,200,
233,201,
234,202,
235,203,
236,204,
237,205,
238,206,
239,207,
240,208,
241,209,
242,210,
243,211,
244,212,
245,213,
246,214,
247,215,
248,148,
249,216,
250,217,
251,218,
252,219,
253,220,
254,221,
255,222,
256,9366,
257,9361,
258,15756,
259,15769,
260,15737,
261,15745,
262,15758,
263,15771,
264,15783,
265,15789,
266,20333,
267,20352,
268,15759,
269,15772,
270,15761,
271,15774,
272,20322,
273,15775,
274,9369,
275,9364,
278,20334,
279,20353,
280,15760,
281,15773,
282,9395,
283,9407,
284,15784,
285,15790,
286,20335,
287,20355,
288,20337,
289,20356,
290,20336,
292,15785,
293,15791,
294,20323,
295,15816,
296,9400,
297,9412,
298,9367,
299,9362,
302,20339,
303,20357,
304,20338,
305,146,
306,20324,
307,20328,
308,15786,
309,15792,
310,20340,
311,20358,
312,20329,
313,15757,
314,15770,
315,20342,
316,20360,
317,15739,
318,15747,
319,20325,
320,20330,
321,141,
322,147,
323,15762,
324,15776,
325,20343,
326,20361,
327,15763,
328,15777,
329,20331,
330,20326,
331,9436,
332,9370,
333,9365,
336,15764,
337,15778,
338,143,
339,149,
340,15755,
341,15768,
342,20344,
343,20362,
344,15765,
345,15779,
346,15740,
347,15748,
348,15787,
349,15793,
350,15741,
351,15750,
352,223,
353,227,
354,15767,
355,15781,
356,15742,
357,15751,
358,20327,
359,20332,
360,9405,
361,9417,
362,9368,
363,9363,
364,15788,
365,15794,
366,9404,
367,9416,
368,15766,
369,15780,
370,20345,
371,20363,
372,20350,
373,20364,
374,20351,
375,20365,
376,224,
377,15743,
378,15752,
379,15744,
380,15754,
381,225,
382,229,
402,105,
403,15826,
450,15821,
461,9394,
462,9406,
463,9398,
464,9410,
465,9401,
466,9413,
467,9403,
468,9415,
469,20349,
470,15733,
471,20346,
472,15734,
473,20348,
474,15735,
475,20347,
476,15736,
501,20354,
504,15731,
505,15732,
509,9421,
567,9435,
592,15832,
593,9418,
594,15836,
595,15822,
596,9423,
597,15841,
598,15802,
599,15823,
600,15829,
601,9426,
602,9429,
603,9432,
604,15830,
606,15831,
607,15809,
608,15825,
609,15813,
610,15883,
611,15884,
612,15835,
613,15838,
614,15819,
615,15844,
616,15827,
618,15885,
620,15798,
621,15808,
622,15799,
623,15833,
624,15814,
625,15795,
626,15810,
627,15803,
628,15886,
629,9437,
630,15887,
632,15888,
633,15800,
634,15843,
635,15807,
637,15804,
638,15797,
640,15889,
641,15815,
642,15805,
643,9442,
644,15824,
648,15801,
649,15828,
650,15834,
651,15796,
652,9438,
653,15837,
654,15812,
655,15890,
656,15806,
657,15842,
658,9441,
660,15818,
661,15817,
664,15820,
665,15891,
668,15892,
669,15811,
671,15893,
673,15840,
674,15839,
688,15894,
690,15895,
695,15896,
699,98,
700,96,
705,15897,
710,128,
711,15749,
712,15846,
716,15847,
720,9443,
721,15848,
728,15738,
729,15782,
730,133,
731,15746,
732,95,
733,15753,
734,15867,
736,15898,
737,15899,
741,15851,
742,15852,
743,15853,
744,15854,
745,15855,
768,65,
769,127,
770,128,
771,95,
772,129,
773,226,
774,130,
775,131,
776,132,
778,133,
779,135,
780,137,
783,15850,
792,15874,
793,15875,
794,15879,
796,15861,
797,15872,
798,15873,
799,15862,
800,15863,
804,15868,
805,15858,
807,134,
808,136,
809,15865,
810,15876,
812,15859,
815,15866,
816,15869,
818,64,
820,15871,
822,138,
825,15860,
826,15877,
827,15878,
828,15870,
829,15864,
865,15845,
900,20317,
901,20318,
937,9355,
956,159,
7742,15729,
7743,15730,
7868,9397,
7869,9409,
8048,9420,
8049,9419,
8050,9434,
8051,9433,
8194,1,
8195,1,
8208,14,
8209,14,
8210,114,
8211,114,
8212,138,
8213,138,
8214,666,
8216,98,
8217,96,
8218,120,
8220,108,
8221,122,
8222,121,
8224,115,
8225,116,
8226,119,
8229,669,
8230,124,
8240,125,
8242,9356,
8243,9357,
8249,110,
8250,111,
8251,734,
8254,226,
8255,15849,
8260,104,
8304,9377,
8308,9378,
8309,9379,
8310,9380,
8311,9381,
8312,9382,
8313,9383,
8320,9384,
8321,9385,
8322,9386,
8323,9387,
8324,9388,
8325,9389,
8326,9390,
8327,9391,
8328,9392,
8329,9393,
8364,9354,
8451,15461,
8463,15514,
8482,228,
8486,9355,
8487,15515,
8494,20366,
8501,15513,
8531,9375,
8532,9376,
8533,15727,
8539,9371,
8540,9372,
8541,9373,
8542,9374,
8592,737,
8593,738,
8594,736,
8595,739,
8596,15511,
8644,8310,
8645,8311,
8646,8309,
8658,15482,
8660,15483,
8678,8013,
8679,8012,
8680,8014,
8681,8011,
8704,15484,
8706,15493,
8707,15485,
8709,15477,
8710,20367,
8711,15494,
8712,15464,
8713,15476,
8714,15900,
8715,15465,
8719,20368,
8721,15901,
8722,151,
8723,15512,
8729,117,
8730,15499,
8733,15501,
8734,15459,
8735,15881,
8736,15491,
8741,15489,
8742,15490,
8743,15480,
8744,15481,
8745,15471,
8746,15470,
8747,15503,
8748,15504,
8749,15902,
8750,15880,
8756,15460,
8757,15502,
8764,100,
8765,15500,
8771,15506,
8773,15507,
8776,15508,
8786,15496,
8800,15456,
8801,15495,
8802,15505,
8804,20369,
8805,20370,
8806,15457,
8807,15458,
8810,15497,
8811,15498,
8818,15903,
8819,15904,
8822,15509,
8823,15510,
8834,15468,
8835,15469,
8836,15472,
8837,15473,
8838,15466,
8839,15467,
8842,15474,
8843,15475,
8853,15486,
8854,15487,
8855,15488,
8856,15905,
8862,15906,
8864,15907,
8869,15492,
8895,15882,
8922,15725,
8923,15726,
8965,15478,
8966,15479,
8984,15728,
9115,12143,
9116,12167,
9117,12144,
9118,12145,
9119,12167,
9120,12146,
9121,12151,
9122,12167,
9123,12152,
9124,12153,
9125,12167,
9126,12154,
9127,8178,
9128,8179,
9129,8180,
9130,12167,
9131,8174,
9132,8175,
9133,8176,
9136,16312,
9137,16313,
9472,7479,
9473,7480,
9474,7481,
9475,7482,
9476,7483,
9477,7484,
9478,7485,
9479,7486,
9480,7487,
9481,7488,
9482,7489,
9483,7490,
9484,7491,
9485,7492,
9486,7493,
9487,7494,
9488,7495,
9489,7496,
9490,7497,
9491,7498,
9492,7499,
9493,7500,
9494,7501,
9495,7502,
9496,7503,
9497,7504,
9498,7505,
9499,7506,
9500,7507,
9501,7508,
9502,7509,
9503,7510,
9504,7511,
9505,7512,
9506,7513,
9507,7514,
9508,7515,
9509,7516,
9510,7517,
9511,7518,
9512,7519,
9513,7520,
9514,7521,
9515,7522,
9516,7523,
9517,7524,
9518,7525,
9519,7526,
9520,7527,
9521,7528,
9522,7529,
9523,7530,
9524,7531,
9525,7532,
9526,7533,
9527,7534,
9528,7535,
9529,7536,
9530,7537,
9531,7538,
9532,7539,
9533,7540,
9534,7541,
9535,7542,
9536,7543,
9537,7544,
9538,7545,
9539,7546,
9540,7547,
9541,7548,
9542,7549,
9543,7550,
9544,7551,
9545,7552,
9546,7553,
9547,7554,
9650,731,
9651,730,
9673,8210,
9674,20371,
9675,723,
9678,725,
9702,12254,
9756,8220,
9757,8221,
9758,8219,
9759,8222,
9792,706,
9794,705,
9986,12176,
10145,8206,
10687,16203,
11013,8207,
11014,8208,
11015,8209,
12288,1,
12289,634,
12290,635,
12291,15453,
12294,15454,
12296,110,
12297,111,
12298,109,
12299,123,
12300,686,
12301,687,
12302,688,
12303,689,
12304,690,
12305,691,
12306,735,
12307,740,
12308,676,
12309,677,
12310,16197,
12311,16198,
12312,12129,
12313,12130,
12316,100,
12317,7608,
12319,7609,
12353,15517,
12354,15518,
12355,15519,
12356,15520,
12357,15521,
12358,15522,
12359,15523,
12360,15524,
12361,15525,
12362,15526,
12363,15527,
12364,15528,
12365,15529,
12366,15530,
12367,15531,
12368,15532,
12369,15533,
12370,15534,
12371,15535,
12372,15536,
12373,15537,
12374,15538,
12375,15539,
12376,15540,
12377,15541,
12378,15542,
12379,15543,
12380,15544,
12381,15545,
12382,15546,
12383,15547,
12384,15548,
12385,15549,
12386,15550,
12387,15551,
12388,15552,
12389,15553,
12390,15554,
12391,15555,
12392,15556,
12393,15557,
12394,15558,
12395,15559,
12396,15560,
12397,15561,
12398,15562,
12399,15563,
12400,15564,
12401,15565,
12402,15566,
12403,15567,
12404,15568,
12405,15569,
12406,15570,
12407,15571,
12408,15572,
12409,15573,
12410,15574,
12411,15575,
12412,15576,
12413,15577,
12414,15578,
12415,15579,
12416,15580,
12417,15581,
12418,15582,
12419,15583,
12420,15584,
12421,15585,
12422,15586,
12423,15587,
12424,15588,
12425,15589,
12426,15590,
12427,15591,
12428,15592,
12429,15593,
12430,15594,
12431,15595,
12432,15596,
12433,15597,
12434,15598,
12435,15599,
12436,15600,
12437,15601,
12438,15602,
12443,643,
12444,644,
12445,15451,
12446,15452,
12447,15463,
12448,15516,
12449,15608,
12450,15609,
12451,15610,
12452,15611,
12453,15612,
12454,15613,
12455,15614,
12456,15615,
12457,15616,
12458,15617,
12459,15618,
12460,15619,
12461,15620,
12462,15621,
12463,15622,
12464,15623,
12465,15624,
12466,15625,
12467,15626,
12468,15627,
12469,15628,
12470,15629,
12471,15630,
12472,15631,
12473,15632,
12474,15633,
12475,15634,
12476,15635,
12477,15636,
12478,15637,
12479,15638,
12480,15639,
12481,15640,
12482,15641,
12483,15642,
12484,15643,
12485,15644,
12486,15645,
12487,15646,
12488,15647,
12489,15648,
12490,15649,
12491,15650,
12492,15651,
12493,15652,
12494,15653,
12495,15654,
12496,15655,
12497,15656,
12498,15657,
12499,15658,
12500,15659,
12501,15660,
12502,15661,
12503,15662,
12504,15663,
12505,15664,
12506,15665,
12507,15666,
12508,15667,
12509,15668,
12510,15669,
12511,15670,
12512,15671,
12513,15672,
12514,15673,
12515,15674,
12516,15675,
12517,15676,
12518,15677,
12519,15678,
12520,15679,
12521,15680,
12522,15681,
12523,15682,
12524,15683,
12525,15684,
12526,15685,
12527,15686,
12528,15687,
12529,15688,
12530,15689,
12531,15690,
12532,15691,
12533,15692,
12534,15693,
12535,15719,
12536,15720,
12537,15721,
12538,15722,
12539,331,
12540,15455,
12541,15449,
12542,15450,
12543,15462,
12784,15702,
12785,15703,
12786,15704,
12787,15705,
12788,15706,
12789,15707,
12790,15708,
12791,15709,
12792,15710,
12793,15711,
12794,15713,
12795,15714,
12796,15715,
12797,15716,
12798,15717,
12799,15718,
13056,8048,
13057,11874,
13058,11875,
13059,8042,
13060,11876,
13061,8183,
13062,11877,
13063,11881,
13064,11879,
13065,11884,
13066,11882,
13067,11886,
13068,11888,
13069,7595,
13070,11889,
13071,11890,
13072,11891,
13073,11892,
13074,11893,
13075,11894,
13076,7586,
13077,8041,
13078,8039,
13079,11896,
13080,8040,
13081,11898,
13082,11900,
13083,11901,
13084,11902,
13085,11903,
13086,8051,
13087,11904,
13088,11905,
13089,11906,
13090,8038,
13091,8043,
13092,11907,
13093,11909,
13094,7596,
13095,7590,
13096,11912,
13097,11913,
13098,8052,
13099,7598,
13101,11915,
13102,11918,
13103,11919,
13104,11920,
13105,8049,
13106,11921,
13107,8327,
13108,11924,
13109,11925,
13110,7592,
13111,11930,
13112,11932,
13113,8046,
13114,11933,
13115,8047,
13116,11926,
13117,11934,
13118,11936,
13119,11937,
13120,11938,
13121,11935,
13122,8045,
13123,11939,
13124,11940,
13125,11941,
13126,11942,
13127,8050,
13128,11943,
13129,7585,
13130,7599,
13131,11944,
13132,11945,
13133,7588,
13134,8328,
13135,11946,
13136,11947,
13137,7593,
13138,11950,
13139,11954,
13140,11951,
13141,11955,
13142,11956,
13143,8044,
13179,8323,
13180,7623,
13181,7622,
13182,7621,
13183,8054,
64256,9358,
64257,112,
64258,113,
64259,9359,
64260,9360,
65077,7899,
65078,7900,
65081,7901,
65082,7902,
65281,2,
65282,3,
65283,4,
65284,5,
65285,6,
65286,7,
65287,8,
65288,9,
65289,10,
65290,11,
65291,12,
65292,13,
65293,151,
65294,15,
65295,104,
65296,17,
65297,18,
65298,19,
65299,20,
65300,21,
65301,22,
65302,23,
65303,24,
65304,25,
65305,26,
65306,27,
65307,28,
65308,29,
65309,30,
65310,31,
65311,32,
65312,33,
65313,34,
65314,35,
65315,36,
65316,37,
65317,38,
65318,39,
65319,40,
65320,41,
65321,42,
65322,43,
65323,44,
65324,45,
65325,46,
65326,47,
65327,48,
65328,49,
65329,50,
65330,51,
65331,52,
65332,53,
65333,54,
65334,55,
65335,56,
65336,57,
65337,58,
65338,59,
65339,60,
65340,97,
65341,62,
65342,128,
65343,64,
65344,65,
65345,66,
65346,67,
65347,68,
65348,69,
65349,70,
65350,71,
65351,72,
65352,73,
65353,74,
65354,75,
65355,76,
65356,77,
65357,78,
65358,79,
65359,80,
65360,81,
65361,82,
65362,83,
65363,84,
65364,85,
65365,86,
65366,87,
65367,88,
65368,89,
65369,90,
65370,91,
65371,92,
65372,99,
65373,94,
65374,100,
65375,12131,
65376,12132,
65377,327,
65378,328,
65379,329,
65380,330,
65381,331,
65382,15689,
65383,15608,
65384,15610,
65385,15612,
65386,15614,
65387,15616,
65388,15674,
65389,15676,
65390,15678,
65391,15642,
65392,15455,
65393,15609,
65394,15611,
65395,15613,
65396,15615,
65397,15617,
65398,15618,
65399,15620,
65400,15622,
65401,15624,
65402,15626,
65403,15628,
65404,15630,
65405,15632,
65406,15634,
65407,15636,
65408,15638,
65409,15640,
65410,15643,
65411,15645,
65412,15647,
65413,15649,
65414,15650,
65415,15651,
65416,15652,
65417,15653,
65418,15654,
65419,15657,
65420,15660,
65421,15663,
65422,15666,
65423,15669,
65424,15670,
65425,15671,
65426,15672,
65427,15673,
65428,15675,
65429,15677,
65430,15679,
65431,15680,
65432,15681,
65433,15682,
65434,15683,
65435,15684,
65436,15686,
65437,15690,
65438,388,
65439,389,
65504,102,
65505,103,
65507,226,
65508,93,
65509,61,
65512,99,
# adjustments
8212,661,
8213,661,
8216,98,
8217,96,
8220,108,
8221,122,
8229,669,
8230,668,
8242,708,
8243,709,
12289,330,
12290,327,
12296,506,
12297,507,
12298,508,
12299,509,
12300,328,
12301,329,
12302,510,
12303,511,
12304,512,
12305,513,
12308,504,
12309,505,
12312,12076,
12313,12077,
12317,423,
12319,424,
12539,331,
65288,239,
65289,240,
65292,243,
65294,245,
65306,257,
65307,258,
65339,290,
65341,292,
65371,322,
65373,324,
65375,12078,
65376,12079,
);
| 16.369451 | 78 | 0.601301 |
ed57aedacdd9215403a0a3d3ffbab209dfa18843
| 1,441 |
pl
|
Perl
|
ass/ass1/prolog_search/romania.pl
|
haoweilou/COMP3411
|
cff984c311a05daa396821156919452547dfa347
|
[
"MIT"
] | null | null | null |
ass/ass1/prolog_search/romania.pl
|
haoweilou/COMP3411
|
cff984c311a05daa396821156919452547dfa347
|
[
"MIT"
] | null | null | null |
ass/ass1/prolog_search/romania.pl
|
haoweilou/COMP3411
|
cff984c311a05daa396821156919452547dfa347
|
[
"MIT"
] | null | null | null |
% romania.pl
goal(bucharest).
s(arad,sibiu,140).
s(arad,timisoara,118).
s(arad,zerind,75).
s(bucharest,fagaras,211).
s(bucharest,giurgiu,90).
s(bucharest,pitesti,101).
s(bucharest,urziceni,85).
s(craiova,dobreta,120).
s(craiova,pitesti,138).
s(craiova,rimnicu_vilcea,146).
s(dobreta,craiova,120).
s(dobreta,mehadia,75).
s(eforie,hirsova,86).
s(fagaras,bucharest,211).
s(fagaras,sibiu,99).
s(giurgiu,bucharest,90).
s(hirsova,eforie,86).
s(hirsova,urziceni,98).
s(iasi,neamt,87).
s(iasi,vaslui,92).
s(lugoj,mehadia,70).
s(lugoj,timisoara,111).
s(mehadia,dobreta,75).
s(mehadia,lugoj,70).
s(neamt,iasi,87).
s(oradea,sibiu,151).
s(oradea,zerind,71).
s(pitesti,bucharest,101).
s(pitesti,craiova,138).
s(pitesti,rimnicu_vilcea,97).
s(rimnicu_vilcea,craiova,146).
s(rimnicu_vilcea,pitesti,97).
s(rimnicu_vilcea,sibiu,80).
s(sibiu,arad,140).
s(sibiu,fagaras,99).
s(sibiu,oradea,151).
s(sibiu,rimnicu_vilcea,80).
s(timisoara,arad,118).
s(timisoara,lugoj,111).
s(urziceni,bucharest,85).
s(urziceni,hirsova,98).
s(urziceni,vaslui,142).
s(vaslui,iasi,92).
s(vaslui,urziceni,142).
s(zerind,arad,75).
s(zerind,oradea,71).
h(arad,366).
h(bucharest,0).
h(craiova,160).
h(dobreta,242).
h(eforie,161).
h(fagaras,178).
h(giurgiu,77).
h(hirsova,151).
h(iasi,226).
h(lugoj,244).
h(mehadia,241).
h(neamt,234).
h(oradea,380).
h(pitesti,98).
h(rimnicu_vilcea,193).
h(sibiu,253).
h(timisoara,329).
h(urziceni,80).
h(vaslui,199).
h(zerind,374).
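% Example queries against the facts above (a search driver would use s/3 as
% the successor relation and h/2 as the heuristic):
%   ?- s(arad, Next, Cost).   % enumerate arad's neighbours with step costs
%   ?- h(sibiu, H).           % H = 253, straight-line estimate to bucharest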
| 15.835165 | 30 | 0.718251 |
ed45e8c49b08c374308b71ea02c8bc913e8117bc
| 4,133 |
pm
|
Perl
|
apps/antivirus/kaspersky/snmp/mode/events.pm
|
nribault/centreon-plugins
|
e99276ba80ba202392791e78d72b00f1306d1a99
|
[
"Apache-2.0"
] | null | null | null |
apps/antivirus/kaspersky/snmp/mode/events.pm
|
nribault/centreon-plugins
|
e99276ba80ba202392791e78d72b00f1306d1a99
|
[
"Apache-2.0"
] | null | null | null |
apps/antivirus/kaspersky/snmp/mode/events.pm
|
nribault/centreon-plugins
|
e99276ba80ba202392791e78d72b00f1306d1a99
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2020 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::antivirus::kaspersky::snmp::mode::events;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
use centreon::plugins::templates::catalog_functions qw(catalog_status_threshold);
sub custom_status_output {
my ($self, %options) = @_;
my $msg = sprintf("Events status is '%s'", $self->{result_values}->{status});
return $msg;
}
sub custom_status_calc {
my ($self, %options) = @_;
$self->{result_values}->{status} = $options{new_datas}->{$self->{instance} . '_eventsStatus'};
return 0;
}
sub set_counters {
my ($self, %options) = @_;
$self->{maps_counters_type} = [
{ name => 'global', type => 0, message_separator => ' - ' },
];
$self->{maps_counters}->{global} = [
{ label => 'status', set => {
key_values => [ { name => 'eventsStatus' } ],
closure_custom_calc => $self->can('custom_status_calc'),
closure_custom_output => $self->can('custom_status_output'),
closure_custom_perfdata => sub { return 0; },
closure_custom_threshold_check => \&catalog_status_threshold,
}
},
{ label => 'events', set => {
key_values => [ { name => 'criticalEventsCount' } ],
output_template => '%d critical event(s)',
perfdatas => [
{ label => 'events', value => 'criticalEventsCount', template => '%d', min => 0 },
],
}
},
];
}
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$options{options}->add_options(arguments =>
{
"warning-status:s" => { name => 'warning_status', default => '%{status} =~ /Warning/i' },
"critical-status:s" => { name => 'critical_status', default => '%{status} =~ /Critical/i' },
});
return $self;
}
sub check_options {
my ($self, %options) = @_;
$self->SUPER::check_options(%options);
$self->change_macros(macros => ['warning_status', 'critical_status']);
}
my %map_status = (
0 => 'OK',
1 => 'Info',
2 => 'Warning',
3 => 'Critical',
);
my $oid_eventsStatus = '.1.3.6.1.4.1.23668.1093.1.6.1';
my $oid_criticalEventsCount = '.1.3.6.1.4.1.23668.1093.1.6.3';
sub manage_selection {
my ($self, %options) = @_;
my $snmp_result = $options{snmp}->get_leef(oids => [ $oid_eventsStatus, $oid_criticalEventsCount ],
nothing_quit => 1);
$self->{global} = {};
$self->{global} = {
eventsStatus => $map_status{$snmp_result->{$oid_eventsStatus}},
criticalEventsCount => $snmp_result->{$oid_criticalEventsCount},
};
}
1;
__END__
=head1 MODE
Check events status.
=over 8
=item B<--warning-status>
Set warning threshold for status. (Default: '%{status} =~ /Warning/i').
Can use special variables like: %{status}
=item B<--critical-status>
Set critical threshold for status. (Default: '%{status} =~ /Critical/i').
Can use special variables like: %{status}
=item B<--warning-*>
Threshold warning.
Can be: 'events'.
=item B<--critical-*>
Threshold critical.
Can be: 'events'.
=back
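=head1 EXAMPLE

A hypothetical invocation through the generic F<centreon_plugins.pl> launcher
(the plugin path, hostname and community below are illustrative placeholders):

    perl centreon_plugins.pl --plugin=apps::antivirus::kaspersky::snmp::plugin \
        --mode=events --hostname=10.0.0.1 --snmp-community=public \
        --warning-events=10 --critical-events=20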
=cut
| 28.115646 | 132 | 0.591338 |
ed3015c60e9e6f885850487ec1c218548ec67fea
| 938 |
t
|
Perl
|
t/50-mech-content-nonhtml.t
|
Corion/WWW-Mechanize-Chrome
|
484030ede01fd8e0b3366d1dacbd646f96e95279
|
[
"Artistic-2.0"
] | 27 |
2017-07-24T12:02:36.000Z
|
2022-01-04T10:39:12.000Z
|
t/50-mech-content-nonhtml.t
|
Corion/WWW-Mechanize-Chrome
|
484030ede01fd8e0b3366d1dacbd646f96e95279
|
[
"Artistic-2.0"
] | 53 |
2017-06-26T21:41:40.000Z
|
2022-01-20T16:51:24.000Z
|
t/50-mech-content-nonhtml.t
|
Corion/WWW-Mechanize-Chrome
|
484030ede01fd8e0b3366d1dacbd646f96e95279
|
[
"Artistic-2.0"
] | 16 |
2017-05-25T06:15:42.000Z
|
2022-01-04T10:39:16.000Z
|
#!perl -w
use strict;
use Test::More;
use WWW::Mechanize::Chrome;
use Log::Log4perl qw(:easy);
use lib '.';
use t::helper;
Log::Log4perl->easy_init($ERROR); # Set priority of root logger to ERROR
# What instances of Chrome will we try?
my @instances = t::helper::browser_instances();
my $testcount = 2;
if (my $err = t::helper::default_unavailable) {
plan skip_all => "Couldn't connect to Chrome: $@";
exit
} else {
plan tests => $testcount*@instances;
};
sub new_mech {
t::helper::need_minimum_chrome_version( '62.0.0.0', @_ );
WWW::Mechanize::Chrome->new(
autodie => 1,
@_,
);
};
t::helper::run_across_instances(\@instances, \&new_mech, $testcount, sub {
my ($browser_instance, $mech) = @_;
isa_ok $mech, 'WWW::Mechanize::Chrome';
$mech->get('https://corion.net/style.css');
my $body = $mech->decoded_content();
like $body, qr!^/\*!, "We retrieve the raw CSS";
});
| 22.333333 | 74 | 0.6258 |
ed6011c99c9e359016e0a9f0d7bde5bfa914635e
| 275 |
pm
|
Perl
|
App/Gromacs/Input.pm
|
vitduck/HPC
|
fbd2c9c5c8beb6fe76b562f52632626644720b01
|
[
"BSD-3-Clause"
] | null | null | null |
App/Gromacs/Input.pm
|
vitduck/HPC
|
fbd2c9c5c8beb6fe76b562f52632626644720b01
|
[
"BSD-3-Clause"
] | null | null | null |
App/Gromacs/Input.pm
|
vitduck/HPC
|
fbd2c9c5c8beb6fe76b562f52632626644720b01
|
[
"BSD-3-Clause"
] | null | null | null |
package HPC::App::Gromacs::Input;
use Moose::Role;
use MooseX::Attribute::Chained;
use HPC::Types::App::Gromacs qw(Tpr);
has 'tpr' => (
is => 'rw',
isa => Tpr,
traits => ['Chained'],
predicate => '_has_tpr',
coerce => 1,
);
1
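# Hypothetical composition sketch (consumer class and .tpr file name assumed):
#
#   package My::Gromacs::Run;
#   use Moose;
#   with 'HPC::App::Gromacs::Input';
#
#   # the Chained trait makes the tpr writer return $self, so calls chain:
#   my $run = My::Gromacs::Run->new->tpr('topol.tpr');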
| 16.176471 | 38 | 0.516364 |
73ee13cdd4d5dc26e4b256a77753c28573f8913d
| 2,395 |
pl
|
Perl
|
perl/lib/unicore/lib/Perl/_XExtend.pl
|
mnikolop/Thesis_project_CyberDoc
|
9a37fdd5a31de24cb902ee31ef19eb992faa1665
|
[
"Apache-2.0"
] | 4 |
2018-04-20T07:27:13.000Z
|
2021-12-21T05:19:24.000Z
|
perl/lib/unicore/lib/Perl/_XExtend.pl
|
mnikolop/Thesis_project_CyberDoc
|
9a37fdd5a31de24cb902ee31ef19eb992faa1665
|
[
"Apache-2.0"
] | 4 |
2021-03-10T19:10:00.000Z
|
2021-05-11T14:58:19.000Z
|
perl/lib/unicore/lib/Perl/_XExtend.pl
|
mnikolop/Thesis_project_CyberDoc
|
9a37fdd5a31de24cb902ee31ef19eb992faa1665
|
[
"Apache-2.0"
] | 1 |
2019-11-12T02:29:26.000Z
|
2019-11-12T02:29:26.000Z
|
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
0300 036F
0483 0489
0591 05BD
05BF
05C1 05C2
05C4 05C5
05C7
0610 061A
064B 065F
0670
06D6 06DC
06DF 06E4
06E7 06E8
06EA 06ED
0711
0730 074A
07A6 07B0
07EB 07F3
0816 0819
081B 0823
0825 0827
0829 082D
0859 085B
08E4 08FE
0900 0903
093A 093C
093E 094F
0951 0957
0962 0963
0981 0983
09BC
09BE 09C4
09C7 09C8
09CB 09CD
09D7
09E2 09E3
0A01 0A03
0A3C
0A3E 0A42
0A47 0A48
0A4B 0A4D
0A51
0A70 0A71
0A75
0A81 0A83
0ABC
0ABE 0AC5
0AC7 0AC9
0ACB 0ACD
0AE2 0AE3
0B01 0B03
0B3C
0B3E 0B44
0B47 0B48
0B4B 0B4D
0B56 0B57
0B62 0B63
0B82
0BBE 0BC2
0BC6 0BC8
0BCA 0BCD
0BD7
0C01 0C03
0C3E 0C44
0C46 0C48
0C4A 0C4D
0C55 0C56
0C62 0C63
0C82 0C83
0CBC
0CBE 0CC4
0CC6 0CC8
0CCA 0CCD
0CD5 0CD6
0CE2 0CE3
0D02 0D03
0D3E 0D44
0D46 0D48
0D4A 0D4D
0D57
0D62 0D63
0D82 0D83
0DCA
0DCF 0DD4
0DD6
0DD8 0DDF
0DF2 0DF3
0E31
0E33 0E3A
0E47 0E4E
0EB1
0EB3 0EB9
0EBB 0EBC
0EC8 0ECD
0F18 0F19
0F35
0F37
0F39
0F3E 0F3F
0F71 0F84
0F86 0F87
0F8D 0F97
0F99 0FBC
0FC6
102D 1037
1039 103E
1056 1059
105E 1060
1071 1074
1082
1084 1086
108D
109D
135D 135F
1712 1714
1732 1734
1752 1753
1772 1773
17B4 17D3
17DD
180B 180D
18A9
1920 192B
1930 193B
19B5 19B7
19BA
1A17 1A1B
1A55 1A5E
1A60
1A62
1A65 1A7C
1A7F
1B00 1B04
1B34 1B44
1B6B 1B73
1B80 1B82
1BA1 1BAD
1BE6 1BF3
1C24 1C37
1CD0 1CD2
1CD4 1CE8
1CED
1CF2 1CF4
1DC0 1DE6
1DFC 1DFF
200C 200D
20D0 20F0
2CEF 2CF1
2D7F
2DE0 2DFF
302A 302F
3099 309A
A66F A672
A674 A67D
A69F
A6F0 A6F1
A802
A806
A80B
A823 A827
A880 A881
A8B4 A8C4
A8E0 A8F1
A926 A92D
A947 A953
A980 A983
A9B3 A9C0
AA29 AA36
AA43
AA4C AA4D
AAB0
AAB2 AAB4
AAB7 AAB8
AABE AABF
AAC1
AAEB AAEF
AAF5 AAF6
ABE3 ABEA
ABEC ABED
FB1E
FE00 FE0F
FE20 FE26
FF9E FF9F
101FD
10A01 10A03
10A05 10A06
10A0C 10A0F
10A38 10A3A
10A3F
11000 11002
11038 11046
11080 11082
110B0 110BA
11100 11102
11127 11134
11180 11182
111B3 111C0
116AB 116B7
16F51 16F7E
16F8F 16F92
1D165 1D169
1D16D 1D172
1D17B 1D182
1D185 1D18B
1D1AA 1D1AD
1D242 1D244
E0100 E01EF
END
| 10.886364 | 77 | 0.751566 |
ed7c833c6df2cae79dbe9aad1680e9f898c9a67b
| 3,155 |
pl
|
Perl
|
ecpluginbuilder.pl
|
electric-cloud-community/unplug
|
31bd6445fdc0252da66afda615d2aa83e7ce73c9
|
[
"Apache-2.0"
] | 1 |
2019-02-26T13:05:05.000Z
|
2019-02-26T13:05:05.000Z
|
ecpluginbuilder.pl
|
electric-cloud-community/Unplug
|
31bd6445fdc0252da66afda615d2aa83e7ce73c9
|
[
"Apache-2.0"
] | 7 |
2019-02-22T12:54:26.000Z
|
2019-03-22T14:53:46.000Z
|
ecpluginbuilder.pl
|
electric-cloud-community/unplug
|
31bd6445fdc0252da66afda615d2aa83e7ce73c9
|
[
"Apache-2.0"
] | 1 |
2019-02-26T13:07:16.000Z
|
2019-02-26T13:07:16.000Z
|
#!/usr/bin/env perl
# Build, upload and promote Unplug using ecpluginbuilder
# https://github.com/electric-cloud/ecpluginbuilder
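# Prerequisites (assumed): an ecpluginbuilder checkout next to this repo
# (see $epb below) and a reachable server session -- the bare
# ElectricCommander->new() relies on the usual COMMANDER_* environment or a
# saved login for authentication.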
use XML::Simple qw(:strict);
use Data::Dumper;
use strict;
use File::Copy;
use ElectricCommander ();
$| = 1;
my $ec = ElectricCommander->new();
my $epb="../ecpluginbuilder";
my $pluginVersion = "2.3.4";
my $pluginKey = "unplug";
# Fix version in plugin.xml
# Update plugin.xml with version,
print "[INFO] - Processing 'META-INF/plugin.xml' file...\n";
my $xs = XML::Simple->new(
ForceArray => 1,
KeyAttr => { },
KeepRoot => 1,
);
my $xmlFile = "META-INF/plugin.xml";
my $ref = $xs->XMLin($xmlFile);
$ref->{plugin}[0]->{version}[0] = $pluginVersion;
open(my $fh, '>', $xmlFile) or die "Could not open file '$xmlFile' $!";
print $fh $xs->XMLout($ref);
close $fh;
# Read buildCounter
my $buildCounter;
{
local $/ = undef;
open FILE, "buildCounter" or die "Couldn't open file: $!";
$buildCounter = <FILE>;
close FILE;
$buildCounter++;
$pluginVersion .= ".$buildCounter";
print "[INFO] - Incrementing build number to $buildCounter...\n";
open FILE, "> buildCounter" or die "Couldn't open file: $!";
print FILE $buildCounter;
close FILE;
}
my $pluginName = "${pluginKey}-${pluginVersion}";
print "[INFO] - Creating plugin '$pluginName'\n";
#
# creates pages un_run*.xml files from the template directory
print "[INFO] - Processing 'pages/un_run.xml' files...\n";
print " ";
# $xs = XML::Simple->new(
# ForceArray => 1,
# KeyAttr => {['plugin'] },
# KeepRoot => 1,
# );
#foreach my $var ("", 0 .. 9, "a" .. "z") {
# print "$var ";
# my $template="template/un_run.xml";
# Update un_runXXX.xml with key, version, label, description
# Bug: cannot change the <plugin> attribute; it gets confused with the element in <component plugin="EC-Core">
# $ref = $xs->XMLin($template);
# print Dumper ($ref);
# $ref->{componentContainer}[0]->{title}[0] = "$pluginKey $pluginVersion";
# $ref->{componentContainer}[0]->{component}[0]->{version}[0] = $pluginVersion;
# $ref->{componentContainer}[0]->{component}[0]->{plugin}[1] = $pluginKey;
# $ref->{componentContainer}[0]->{component}[0]->{requests}[0]->
# {request}[0]->{getProperty}[0]->{propertyName}[0] = "/javascript getProperty("/server/${pluginKey}/v${var}")";
#
# # save file
# open(my $fh, '>', $xmlFile) or die "Could not write file '$xmlFile' $!";
# print $fh $xs->XMLout($ref);
# close $fh;
#}
print "\n";
system ("$epb -pack-jar -plugin-name $pluginKey -plugin-version $pluginVersion " .
" -folder cgi-bin" .
" -folder META-INF" .
" -folder htdocs" .
" -folder pages" .
" -folder dsl"
);
move("build/${pluginKey}.jar", ".");
# Uninstall old plugin
# print "[INFO] - Uninstalling old plugin...\n";
# $ec->uninstallPlugin($pluginKey) || print "No old plugin\n";
# Install plugin
print "[INFO] - Installing plugin ${pluginKey}.jar...\n";
$ec->installPlugin("${pluginKey}.jar");
# Promote plugin
print "[INFO] - Promoting plugin...\n";
$ec->promotePlugin($pluginName);
| 28.944954 | 125 | 0.61458 |
ed607006bacaa7e28ec955595de9fab028923ed0
| 948 |
t
|
Perl
|
test/url.t
|
dagolden/git-hub
|
76c9d56190f8cdae11849d6e32a30e265e3de2ab
|
[
"MIT"
] | 1 |
2015-10-15T06:57:15.000Z
|
2015-10-15T06:57:15.000Z
|
test/url.t
|
perlpunk/git-hub
|
55ddab774e2809fd34e3d9866a3debccb944b8b9
|
[
"MIT"
] | null | null | null |
test/url.t
|
perlpunk/git-hub
|
55ddab774e2809fd34e3d9866a3debccb944b8b9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env bash
# Set up the test environment (PATH, etc)
source test/setup
# This is set for other tests, but causes problems here
unset GIT_DIR
# Pull in TAP framework (from ext/test-more-bash)
use Test::More
# Make a test repo dir called tmp
# TODO: this should be abstracted
rm -fr tmp
mkdir tmp
# Create a basic repo with a 'bar' branch
# Run our test command and capture the output
# Parens () create a subprocess
url="$(
# This will change back to current dir after we exit subprocess
cd tmp
# Set up dir as a minimal repo the way we want it.
# Do it in a subprocess and throw away the output
(
git init
git remote add origin [email protected]:test/test
touch foo
git add foo
git commit -m ...
git checkout -b bar
) &> /dev/null
# Run the test command
git hub url
)"
# Make sure output URL has 'bar' in it:
like "$url" bar "'git hub url' respects branches"
# Clean up test repo:
rm -fr tmp
done_testing
| 22.571429 | 65 | 0.694093 |
ed053bd8c9f86a6b84a80c942af2edbb21898a56
| 885 |
al
|
Perl
|
Apps/CZ/CompensationLocalization/app/Src/Codeunits/NotificationHandler.Codeunit.al
|
manjulchauhan/ALAppExtensions
|
3f2f1d6e5337188b1af9c0275420f1c1de036a7f
|
[
"MIT"
] | 127 |
2018-04-17T18:03:03.000Z
|
2019-05-06T18:54:17.000Z
|
Apps/CZ/CompensationLocalization/app/Src/Codeunits/NotificationHandler.Codeunit.al
|
snu-development/ALAppExtensions
|
371a27fe48483be776642dde19483a87ae27289c
|
[
"MIT"
] | 2,279 |
2018-09-12T12:01:49.000Z
|
2019-05-06T13:59:35.000Z
|
Apps/CZ/CompensationLocalization/app/Src/Codeunits/NotificationHandler.Codeunit.al
|
snu-development/ALAppExtensions
|
371a27fe48483be776642dde19483a87ae27289c
|
[
"MIT"
] | 41 |
2018-05-17T11:19:52.000Z
|
2019-04-30T17:30:38.000Z
|
codeunit 31424 "Notification Handler CZC"
{
var
CompensationTxt: Label 'Compensation';
[EventSubscriber(ObjectType::Codeunit, Codeunit::"Notification Management", 'OnGetDocumentTypeAndNumber', '', false, false)]
local procedure GetDocumentTypeAndNumberFromCompensationOnGetDocumentTypeAndNumber(var RecRef: RecordRef; var DocumentType: Text; var DocumentNo: Text; var IsHandled: Boolean)
var
FieldRef: FieldRef;
begin
if IsHandled then
exit;
IsHandled := true;
case RecRef.Number of
Database::"Compensation Header CZC":
begin
DocumentType := CompensationTxt;
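// Field no. 5 is assumed to hold the compensation document no. on "Compensation Header CZC"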
FieldRef := RecRef.Field(5);
DocumentNo := Format(FieldRef.Value);
end;
else
IsHandled := false;
end;
end;
}
| 34.038462 | 179 | 0.60226 |
ed5806d981fa6e3c067d9e82300ceebbbd593b67
| 607 |
t
|
Perl
|
t/suite.t
|
miyagawa/Plack-Handler-AnyEvent-SCGI
|
dbc43d7765a23dedb8891d2ebe6510a1dfc6714f
|
[
"Artistic-1.0"
] | 1 |
2015-11-05T07:17:26.000Z
|
2015-11-05T07:17:26.000Z
|
t/suite.t
|
miyagawa/Plack-Handler-AnyEvent-SCGI
|
dbc43d7765a23dedb8891d2ebe6510a1dfc6714f
|
[
"Artistic-1.0"
] | null | null | null |
t/suite.t
|
miyagawa/Plack-Handler-AnyEvent-SCGI
|
dbc43d7765a23dedb8891d2ebe6510a1dfc6714f
|
[
"Artistic-1.0"
] | null | null | null |
use strict;
use warnings;
use Test::More;
use Plack;
use Plack::Handler::AnyEvent::SCGI;
use Plack::Test::Suite;
use t::SCGIUtils;
my $lighty_port;
my $scgi_port;
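# test_lighty_external (from t::SCGIUtils) presumably boots an external
# lighttpd front-end and hands us its port plus the backend SCGI port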
test_lighty_external(
sub {
($lighty_port, $scgi_port) = @_;
Plack::Test::Suite->run_server_tests(\&run_server, $scgi_port, $lighty_port);
done_testing();
}
);
sub run_server {
my($port, $app) = @_;
$| = 0; # Test::Builder autoflushes this. reset!
my $server = Plack::Handler::AnyEvent::SCGI->new(
host => '127.0.0.1',
port => $port,
);
$server->run($app);
}
| 18.393939 | 84 | 0.586491 |
ed7dab0c7e8d1992a28dbb171644c3995e89d01c
| 27,019 |
pm
|
Perl
|
local/lib/perl5/DateTime/TimeZone/Africa/Casablanca.pm
|
jkb78/extrajnm
|
6890e38e15f85ea9c09a141aa14affad0b8e91e7
|
[
"MIT"
] | null | null | null |
local/lib/perl5/DateTime/TimeZone/Africa/Casablanca.pm
|
jkb78/extrajnm
|
6890e38e15f85ea9c09a141aa14affad0b8e91e7
|
[
"MIT"
] | null | null | null |
local/lib/perl5/DateTime/TimeZone/Africa/Casablanca.pm
|
jkb78/extrajnm
|
6890e38e15f85ea9c09a141aa14affad0b8e91e7
|
[
"MIT"
] | null | null | null |
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/Q713JNUf8G/africa. Olson data version 2016a
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Africa::Casablanca;
$DateTime::TimeZone::Africa::Casablanca::VERSION = '1.95';
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::Africa::Casablanca::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
60362670620, # utc_end 1913-10-26 00:30:20 (Sun)
DateTime::TimeZone::NEG_INFINITY, # local_start
60362668800, # local_end 1913-10-26 00:00:00 (Sun)
-1820,
0,
'LMT',
],
[
60362670620, # utc_start 1913-10-26 00:30:20 (Sun)
61179321600, # utc_end 1939-09-12 00:00:00 (Tue)
60362670620, # local_start 1913-10-26 00:30:20 (Sun)
61179321600, # local_end 1939-09-12 00:00:00 (Tue)
0,
0,
'WET',
],
[
61179321600, # utc_start 1939-09-12 00:00:00 (Tue)
61185193200, # utc_end 1939-11-18 23:00:00 (Sat)
61179325200, # local_start 1939-09-12 01:00:00 (Tue)
61185196800, # local_end 1939-11-19 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
61185193200, # utc_start 1939-11-18 23:00:00 (Sat)
61193664000, # utc_end 1940-02-25 00:00:00 (Sun)
61185193200, # local_start 1939-11-18 23:00:00 (Sat)
61193664000, # local_end 1940-02-25 00:00:00 (Sun)
0,
0,
'WET',
],
[
61193664000, # utc_start 1940-02-25 00:00:00 (Sun)
61374495600, # utc_end 1945-11-17 23:00:00 (Sat)
61193667600, # local_start 1940-02-25 01:00:00 (Sun)
61374499200, # local_end 1945-11-18 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
61374495600, # utc_start 1945-11-17 23:00:00 (Sat)
61518441600, # utc_end 1950-06-11 00:00:00 (Sun)
61374495600, # local_start 1945-11-17 23:00:00 (Sat)
61518441600, # local_end 1950-06-11 00:00:00 (Sun)
0,
0,
'WET',
],
[
61518441600, # utc_start 1950-06-11 00:00:00 (Sun)
61530534000, # utc_end 1950-10-28 23:00:00 (Sat)
61518445200, # local_start 1950-06-11 01:00:00 (Sun)
61530537600, # local_end 1950-10-29 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
61530534000, # utc_start 1950-10-28 23:00:00 (Sat)
62054251200, # utc_end 1967-06-03 12:00:00 (Sat)
61530534000, # local_start 1950-10-28 23:00:00 (Sat)
62054251200, # local_end 1967-06-03 12:00:00 (Sat)
0,
0,
'WET',
],
[
62054251200, # utc_start 1967-06-03 12:00:00 (Sat)
62064572400, # utc_end 1967-09-30 23:00:00 (Sat)
62054254800, # local_start 1967-06-03 13:00:00 (Sat)
62064576000, # local_end 1967-10-01 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
62064572400, # utc_start 1967-09-30 23:00:00 (Sat)
62276947200, # utc_end 1974-06-24 00:00:00 (Mon)
62064572400, # local_start 1967-09-30 23:00:00 (Sat)
62276947200, # local_end 1974-06-24 00:00:00 (Mon)
0,
0,
'WET',
],
[
62276947200, # utc_start 1974-06-24 00:00:00 (Mon)
62282905200, # utc_end 1974-08-31 23:00:00 (Sat)
62276950800, # local_start 1974-06-24 01:00:00 (Mon)
62282908800, # local_end 1974-09-01 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
62282905200, # utc_start 1974-08-31 23:00:00 (Sat)
62335440000, # utc_end 1976-05-01 00:00:00 (Sat)
62282905200, # local_start 1974-08-31 23:00:00 (Sat)
62335440000, # local_end 1976-05-01 00:00:00 (Sat)
0,
0,
'WET',
],
[
62335440000, # utc_start 1976-05-01 00:00:00 (Sat)
62343385200, # utc_end 1976-07-31 23:00:00 (Sat)
62335443600, # local_start 1976-05-01 01:00:00 (Sat)
62343388800, # local_end 1976-08-01 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
62343385200, # utc_start 1976-07-31 23:00:00 (Sat)
62366976000, # utc_end 1977-05-01 00:00:00 (Sun)
62343385200, # local_start 1976-07-31 23:00:00 (Sat)
62366976000, # local_end 1977-05-01 00:00:00 (Sun)
0,
0,
'WET',
],
[
62366976000, # utc_start 1977-05-01 00:00:00 (Sun)
62379932400, # utc_end 1977-09-27 23:00:00 (Tue)
62366979600, # local_start 1977-05-01 01:00:00 (Sun)
62379936000, # local_end 1977-09-28 00:00:00 (Wed)
3600,
1,
'WEST',
],
[
62379932400, # utc_start 1977-09-27 23:00:00 (Tue)
62401190400, # utc_end 1978-06-01 00:00:00 (Thu)
62379932400, # local_start 1977-09-27 23:00:00 (Tue)
62401190400, # local_end 1978-06-01 00:00:00 (Thu)
0,
0,
'WET',
],
[
62401190400, # utc_start 1978-06-01 00:00:00 (Thu)
62406716400, # utc_end 1978-08-03 23:00:00 (Thu)
62401194000, # local_start 1978-06-01 01:00:00 (Thu)
62406720000, # local_end 1978-08-04 00:00:00 (Fri)
3600,
1,
'WEST',
],
[
62406716400, # utc_start 1978-08-03 23:00:00 (Thu)
62583926400, # utc_end 1984-03-16 00:00:00 (Fri)
62406716400, # local_start 1978-08-03 23:00:00 (Thu)
62583926400, # local_end 1984-03-16 00:00:00 (Fri)
0,
0,
'WET',
],
[
62583926400, # utc_start 1984-03-16 00:00:00 (Fri)
62640601200, # utc_end 1985-12-31 23:00:00 (Tue)
62583930000, # local_start 1984-03-16 01:00:00 (Fri)
62640604800, # local_end 1986-01-01 00:00:00 (Wed)
3600,
0,
'CET',
],
[
62640601200, # utc_start 1985-12-31 23:00:00 (Tue)
63347961600, # utc_end 2008-06-01 00:00:00 (Sun)
62640601200, # local_start 1985-12-31 23:00:00 (Tue)
63347961600, # local_end 2008-06-01 00:00:00 (Sun)
0,
0,
'WET',
],
[
63347961600, # utc_start 2008-06-01 00:00:00 (Sun)
63355906800, # utc_end 2008-08-31 23:00:00 (Sun)
63347965200, # local_start 2008-06-01 01:00:00 (Sun)
63355910400, # local_end 2008-09-01 00:00:00 (Mon)
3600,
1,
'WEST',
],
[
63355906800, # utc_start 2008-08-31 23:00:00 (Sun)
63379497600, # utc_end 2009-06-01 00:00:00 (Mon)
63355906800, # local_start 2008-08-31 23:00:00 (Sun)
63379497600, # local_end 2009-06-01 00:00:00 (Mon)
0,
0,
'WET',
],
[
63379497600, # utc_start 2009-06-01 00:00:00 (Mon)
63386492400, # utc_end 2009-08-20 23:00:00 (Thu)
63379501200, # local_start 2009-06-01 01:00:00 (Mon)
63386496000, # local_end 2009-08-21 00:00:00 (Fri)
3600,
1,
'WEST',
],
[
63386492400, # utc_start 2009-08-20 23:00:00 (Thu)
63408441600, # utc_end 2010-05-02 00:00:00 (Sun)
63386492400, # local_start 2009-08-20 23:00:00 (Thu)
63408441600, # local_end 2010-05-02 00:00:00 (Sun)
0,
0,
'WET',
],
[
63408441600, # utc_start 2010-05-02 00:00:00 (Sun)
63416905200, # utc_end 2010-08-07 23:00:00 (Sat)
63408445200, # local_start 2010-05-02 01:00:00 (Sun)
63416908800, # local_end 2010-08-08 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
63416905200, # utc_start 2010-08-07 23:00:00 (Sat)
63437472000, # utc_end 2011-04-03 00:00:00 (Sun)
63416905200, # local_start 2010-08-07 23:00:00 (Sat)
63437472000, # local_end 2011-04-03 00:00:00 (Sun)
0,
0,
'WET',
],
[
63437472000, # utc_start 2011-04-03 00:00:00 (Sun)
63447750000, # utc_end 2011-07-30 23:00:00 (Sat)
63437475600, # local_start 2011-04-03 01:00:00 (Sun)
63447753600, # local_end 2011-07-31 00:00:00 (Sun)
3600,
1,
'WEST',
],
[
63447750000, # utc_start 2011-07-30 23:00:00 (Sat)
63471348000, # utc_end 2012-04-29 02:00:00 (Sun)
63447750000, # local_start 2011-07-30 23:00:00 (Sat)
63471348000, # local_end 2012-04-29 02:00:00 (Sun)
0,
0,
'WET',
],
[
63471348000, # utc_start 2012-04-29 02:00:00 (Sun)
63478432800, # utc_end 2012-07-20 02:00:00 (Fri)
63471351600, # local_start 2012-04-29 03:00:00 (Sun)
63478436400, # local_end 2012-07-20 03:00:00 (Fri)
3600,
1,
'WEST',
],
[
63478432800, # utc_start 2012-07-20 02:00:00 (Fri)
63481111200, # utc_end 2012-08-20 02:00:00 (Mon)
63478432800, # local_start 2012-07-20 02:00:00 (Fri)
63481111200, # local_end 2012-08-20 02:00:00 (Mon)
0,
0,
'WET',
],
[
63481111200, # utc_start 2012-08-20 02:00:00 (Mon)
63484653600, # utc_end 2012-09-30 02:00:00 (Sun)
63481114800, # local_start 2012-08-20 03:00:00 (Mon)
63484657200, # local_end 2012-09-30 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63484653600, # utc_start 2012-09-30 02:00:00 (Sun)
63502797600, # utc_end 2013-04-28 02:00:00 (Sun)
63484653600, # local_start 2012-09-30 02:00:00 (Sun)
63502797600, # local_end 2013-04-28 02:00:00 (Sun)
0,
0,
'WET',
],
[
63502797600, # utc_start 2013-04-28 02:00:00 (Sun)
63508845600, # utc_end 2013-07-07 02:00:00 (Sun)
63502801200, # local_start 2013-04-28 03:00:00 (Sun)
63508849200, # local_end 2013-07-07 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63508845600, # utc_start 2013-07-07 02:00:00 (Sun)
63511783200, # utc_end 2013-08-10 02:00:00 (Sat)
63508845600, # local_start 2013-07-07 02:00:00 (Sun)
63511783200, # local_end 2013-08-10 02:00:00 (Sat)
0,
0,
'WET',
],
[
63511783200, # utc_start 2013-08-10 02:00:00 (Sat)
63518522400, # utc_end 2013-10-27 02:00:00 (Sun)
63511786800, # local_start 2013-08-10 03:00:00 (Sat)
63518526000, # local_end 2013-10-27 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63518522400, # utc_start 2013-10-27 02:00:00 (Sun)
63531828000, # utc_end 2014-03-30 02:00:00 (Sun)
63518522400, # local_start 2013-10-27 02:00:00 (Sun)
63531828000, # local_end 2014-03-30 02:00:00 (Sun)
0,
0,
'WET',
],
[
63531828000, # utc_start 2014-03-30 02:00:00 (Sun)
63539604000, # utc_end 2014-06-28 02:00:00 (Sat)
63531831600, # local_start 2014-03-30 03:00:00 (Sun)
63539607600, # local_end 2014-06-28 03:00:00 (Sat)
3600,
1,
'WEST',
],
[
63539604000, # utc_start 2014-06-28 02:00:00 (Sat)
63542628000, # utc_end 2014-08-02 02:00:00 (Sat)
63539604000, # local_start 2014-06-28 02:00:00 (Sat)
63542628000, # local_end 2014-08-02 02:00:00 (Sat)
0,
0,
'WET',
],
[
63542628000, # utc_start 2014-08-02 02:00:00 (Sat)
63549972000, # utc_end 2014-10-26 02:00:00 (Sun)
63542631600, # local_start 2014-08-02 03:00:00 (Sat)
63549975600, # local_end 2014-10-26 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63549972000, # utc_start 2014-10-26 02:00:00 (Sun)
63563277600, # utc_end 2015-03-29 02:00:00 (Sun)
63549972000, # local_start 2014-10-26 02:00:00 (Sun)
63563277600, # local_end 2015-03-29 02:00:00 (Sun)
0,
0,
'WET',
],
[
63563277600, # utc_start 2015-03-29 02:00:00 (Sun)
63569930400, # utc_end 2015-06-14 02:00:00 (Sun)
63563281200, # local_start 2015-03-29 03:00:00 (Sun)
63569934000, # local_end 2015-06-14 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63569930400, # utc_start 2015-06-14 02:00:00 (Sun)
63572954400, # utc_end 2015-07-19 02:00:00 (Sun)
63569930400, # local_start 2015-06-14 02:00:00 (Sun)
63572954400, # local_end 2015-07-19 02:00:00 (Sun)
0,
0,
'WET',
],
[
63572954400, # utc_start 2015-07-19 02:00:00 (Sun)
63581421600, # utc_end 2015-10-25 02:00:00 (Sun)
63572958000, # local_start 2015-07-19 03:00:00 (Sun)
63581425200, # local_end 2015-10-25 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63581421600, # utc_start 2015-10-25 02:00:00 (Sun)
63594727200, # utc_end 2016-03-27 02:00:00 (Sun)
63581421600, # local_start 2015-10-25 02:00:00 (Sun)
63594727200, # local_end 2016-03-27 02:00:00 (Sun)
0,
0,
'WET',
],
[
63594727200, # utc_start 2016-03-27 02:00:00 (Sun)
63600775200, # utc_end 2016-06-05 02:00:00 (Sun)
63594730800, # local_start 2016-03-27 03:00:00 (Sun)
63600778800, # local_end 2016-06-05 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63600775200, # utc_start 2016-06-05 02:00:00 (Sun)
63603799200, # utc_end 2016-07-10 02:00:00 (Sun)
63600775200, # local_start 2016-06-05 02:00:00 (Sun)
63603799200, # local_end 2016-07-10 02:00:00 (Sun)
0,
0,
'WET',
],
[
63603799200, # utc_start 2016-07-10 02:00:00 (Sun)
63613476000, # utc_end 2016-10-30 02:00:00 (Sun)
63603802800, # local_start 2016-07-10 03:00:00 (Sun)
63613479600, # local_end 2016-10-30 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63613476000, # utc_start 2016-10-30 02:00:00 (Sun)
63626176800, # utc_end 2017-03-26 02:00:00 (Sun)
63613476000, # local_start 2016-10-30 02:00:00 (Sun)
63626176800, # local_end 2017-03-26 02:00:00 (Sun)
0,
0,
'WET',
],
[
63626176800, # utc_start 2017-03-26 02:00:00 (Sun)
63631015200, # utc_end 2017-05-21 02:00:00 (Sun)
63626180400, # local_start 2017-03-26 03:00:00 (Sun)
63631018800, # local_end 2017-05-21 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63631015200, # utc_start 2017-05-21 02:00:00 (Sun)
63634644000, # utc_end 2017-07-02 02:00:00 (Sun)
63631015200, # local_start 2017-05-21 02:00:00 (Sun)
63634644000, # local_end 2017-07-02 02:00:00 (Sun)
0,
0,
'WET',
],
[
63634644000, # utc_start 2017-07-02 02:00:00 (Sun)
63644925600, # utc_end 2017-10-29 02:00:00 (Sun)
63634647600, # local_start 2017-07-02 03:00:00 (Sun)
63644929200, # local_end 2017-10-29 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63644925600, # utc_start 2017-10-29 02:00:00 (Sun)
63657626400, # utc_end 2018-03-25 02:00:00 (Sun)
63644925600, # local_start 2017-10-29 02:00:00 (Sun)
63657626400, # local_end 2018-03-25 02:00:00 (Sun)
0,
0,
'WET',
],
[
63657626400, # utc_start 2018-03-25 02:00:00 (Sun)
63661860000, # utc_end 2018-05-13 02:00:00 (Sun)
63657630000, # local_start 2018-03-25 03:00:00 (Sun)
63661863600, # local_end 2018-05-13 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63661860000, # utc_start 2018-05-13 02:00:00 (Sun)
63664884000, # utc_end 2018-06-17 02:00:00 (Sun)
63661860000, # local_start 2018-05-13 02:00:00 (Sun)
63664884000, # local_end 2018-06-17 02:00:00 (Sun)
0,
0,
'WET',
],
[
63664884000, # utc_start 2018-06-17 02:00:00 (Sun)
63676375200, # utc_end 2018-10-28 02:00:00 (Sun)
63664887600, # local_start 2018-06-17 03:00:00 (Sun)
63676378800, # local_end 2018-10-28 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63676375200, # utc_start 2018-10-28 02:00:00 (Sun)
63689680800, # utc_end 2019-03-31 02:00:00 (Sun)
63676375200, # local_start 2018-10-28 02:00:00 (Sun)
63689680800, # local_end 2019-03-31 02:00:00 (Sun)
0,
0,
'WET',
],
[
63689680800, # utc_start 2019-03-31 02:00:00 (Sun)
63692704800, # utc_end 2019-05-05 02:00:00 (Sun)
63689684400, # local_start 2019-03-31 03:00:00 (Sun)
63692708400, # local_end 2019-05-05 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63692704800, # utc_start 2019-05-05 02:00:00 (Sun)
63695728800, # utc_end 2019-06-09 02:00:00 (Sun)
63692704800, # local_start 2019-05-05 02:00:00 (Sun)
63695728800, # local_end 2019-06-09 02:00:00 (Sun)
0,
0,
'WET',
],
[
63695728800, # utc_start 2019-06-09 02:00:00 (Sun)
63707824800, # utc_end 2019-10-27 02:00:00 (Sun)
63695732400, # local_start 2019-06-09 03:00:00 (Sun)
63707828400, # local_end 2019-10-27 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63707824800, # utc_start 2019-10-27 02:00:00 (Sun)
63721130400, # utc_end 2020-03-29 02:00:00 (Sun)
63707824800, # local_start 2019-10-27 02:00:00 (Sun)
63721130400, # local_end 2020-03-29 02:00:00 (Sun)
0,
0,
'WET',
],
[
63721130400, # utc_start 2020-03-29 02:00:00 (Sun)
63722944800, # utc_end 2020-04-19 02:00:00 (Sun)
63721134000, # local_start 2020-03-29 03:00:00 (Sun)
63722948400, # local_end 2020-04-19 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63722944800, # utc_start 2020-04-19 02:00:00 (Sun)
63725968800, # utc_end 2020-05-24 02:00:00 (Sun)
63722944800, # local_start 2020-04-19 02:00:00 (Sun)
63725968800, # local_end 2020-05-24 02:00:00 (Sun)
0,
0,
'WET',
],
[
63725968800, # utc_start 2020-05-24 02:00:00 (Sun)
63739274400, # utc_end 2020-10-25 02:00:00 (Sun)
63725972400, # local_start 2020-05-24 03:00:00 (Sun)
63739278000, # local_end 2020-10-25 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63739274400, # utc_start 2020-10-25 02:00:00 (Sun)
63752580000, # utc_end 2021-03-28 02:00:00 (Sun)
63739274400, # local_start 2020-10-25 02:00:00 (Sun)
63752580000, # local_end 2021-03-28 02:00:00 (Sun)
0,
0,
'WET',
],
[
63752580000, # utc_start 2021-03-28 02:00:00 (Sun)
63753789600, # utc_end 2021-04-11 02:00:00 (Sun)
63752583600, # local_start 2021-03-28 03:00:00 (Sun)
63753793200, # local_end 2021-04-11 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63753789600, # utc_start 2021-04-11 02:00:00 (Sun)
63756813600, # utc_end 2021-05-16 02:00:00 (Sun)
63753789600, # local_start 2021-04-11 02:00:00 (Sun)
63756813600, # local_end 2021-05-16 02:00:00 (Sun)
0,
0,
'WET',
],
[
63756813600, # utc_start 2021-05-16 02:00:00 (Sun)
63771328800, # utc_end 2021-10-31 02:00:00 (Sun)
63756817200, # local_start 2021-05-16 03:00:00 (Sun)
63771332400, # local_end 2021-10-31 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63771328800, # utc_start 2021-10-31 02:00:00 (Sun)
63787658400, # utc_end 2022-05-08 02:00:00 (Sun)
63771328800, # local_start 2021-10-31 02:00:00 (Sun)
63787658400, # local_end 2022-05-08 02:00:00 (Sun)
0,
0,
'WET',
],
[
63787658400, # utc_start 2022-05-08 02:00:00 (Sun)
63802778400, # utc_end 2022-10-30 02:00:00 (Sun)
63787662000, # local_start 2022-05-08 03:00:00 (Sun)
63802782000, # local_end 2022-10-30 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63802778400, # utc_start 2022-10-30 02:00:00 (Sun)
63817898400, # utc_end 2023-04-23 02:00:00 (Sun)
63802778400, # local_start 2022-10-30 02:00:00 (Sun)
63817898400, # local_end 2023-04-23 02:00:00 (Sun)
0,
0,
'WET',
],
[
63817898400, # utc_start 2023-04-23 02:00:00 (Sun)
63834228000, # utc_end 2023-10-29 02:00:00 (Sun)
63817902000, # local_start 2023-04-23 03:00:00 (Sun)
63834231600, # local_end 2023-10-29 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63834228000, # utc_start 2023-10-29 02:00:00 (Sun)
63848743200, # utc_end 2024-04-14 02:00:00 (Sun)
63834228000, # local_start 2023-10-29 02:00:00 (Sun)
63848743200, # local_end 2024-04-14 02:00:00 (Sun)
0,
0,
'WET',
],
[
63848743200, # utc_start 2024-04-14 02:00:00 (Sun)
63865677600, # utc_end 2024-10-27 02:00:00 (Sun)
63848746800, # local_start 2024-04-14 03:00:00 (Sun)
63865681200, # local_end 2024-10-27 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63865677600, # utc_start 2024-10-27 02:00:00 (Sun)
63879588000, # utc_end 2025-04-06 02:00:00 (Sun)
63865677600, # local_start 2024-10-27 02:00:00 (Sun)
63879588000, # local_end 2025-04-06 02:00:00 (Sun)
0,
0,
'WET',
],
[
63879588000, # utc_start 2025-04-06 02:00:00 (Sun)
63897127200, # utc_end 2025-10-26 02:00:00 (Sun)
63879591600, # local_start 2025-04-06 03:00:00 (Sun)
63897130800, # local_end 2025-10-26 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63897127200, # utc_start 2025-10-26 02:00:00 (Sun)
63910432800, # utc_end 2026-03-29 02:00:00 (Sun)
63897127200, # local_start 2025-10-26 02:00:00 (Sun)
63910432800, # local_end 2026-03-29 02:00:00 (Sun)
0,
0,
'WET',
],
[
63910432800, # utc_start 2026-03-29 02:00:00 (Sun)
63928576800, # utc_end 2026-10-25 02:00:00 (Sun)
63910436400, # local_start 2026-03-29 03:00:00 (Sun)
63928580400, # local_end 2026-10-25 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63928576800, # utc_start 2026-10-25 02:00:00 (Sun)
63941882400, # utc_end 2027-03-28 02:00:00 (Sun)
63928576800, # local_start 2026-10-25 02:00:00 (Sun)
63941882400, # local_end 2027-03-28 02:00:00 (Sun)
0,
0,
'WET',
],
[
63941882400, # utc_start 2027-03-28 02:00:00 (Sun)
63960631200, # utc_end 2027-10-31 02:00:00 (Sun)
63941886000, # local_start 2027-03-28 03:00:00 (Sun)
63960634800, # local_end 2027-10-31 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63960631200, # utc_start 2027-10-31 02:00:00 (Sun)
63973332000, # utc_end 2028-03-26 02:00:00 (Sun)
63960631200, # local_start 2027-10-31 02:00:00 (Sun)
63973332000, # local_end 2028-03-26 02:00:00 (Sun)
0,
0,
'WET',
],
[
63973332000, # utc_start 2028-03-26 02:00:00 (Sun)
63992080800, # utc_end 2028-10-29 02:00:00 (Sun)
63973335600, # local_start 2028-03-26 03:00:00 (Sun)
63992084400, # local_end 2028-10-29 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
63992080800, # utc_start 2028-10-29 02:00:00 (Sun)
64004781600, # utc_end 2029-03-25 02:00:00 (Sun)
63992080800, # local_start 2028-10-29 02:00:00 (Sun)
64004781600, # local_end 2029-03-25 02:00:00 (Sun)
0,
0,
'WET',
],
[
64004781600, # utc_start 2029-03-25 02:00:00 (Sun)
64023530400, # utc_end 2029-10-28 02:00:00 (Sun)
64004785200, # local_start 2029-03-25 03:00:00 (Sun)
64023534000, # local_end 2029-10-28 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64023530400, # utc_start 2029-10-28 02:00:00 (Sun)
64036836000, # utc_end 2030-03-31 02:00:00 (Sun)
64023530400, # local_start 2029-10-28 02:00:00 (Sun)
64036836000, # local_end 2030-03-31 02:00:00 (Sun)
0,
0,
'WET',
],
[
64036836000, # utc_start 2030-03-31 02:00:00 (Sun)
64054980000, # utc_end 2030-10-27 02:00:00 (Sun)
64036839600, # local_start 2030-03-31 03:00:00 (Sun)
64054983600, # local_end 2030-10-27 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64054980000, # utc_start 2030-10-27 02:00:00 (Sun)
64068285600, # utc_end 2031-03-30 02:00:00 (Sun)
64054980000, # local_start 2030-10-27 02:00:00 (Sun)
64068285600, # local_end 2031-03-30 02:00:00 (Sun)
0,
0,
'WET',
],
[
64068285600, # utc_start 2031-03-30 02:00:00 (Sun)
64086429600, # utc_end 2031-10-26 02:00:00 (Sun)
64068289200, # local_start 2031-03-30 03:00:00 (Sun)
64086433200, # local_end 2031-10-26 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64086429600, # utc_start 2031-10-26 02:00:00 (Sun)
64099735200, # utc_end 2032-03-28 02:00:00 (Sun)
64086429600, # local_start 2031-10-26 02:00:00 (Sun)
64099735200, # local_end 2032-03-28 02:00:00 (Sun)
0,
0,
'WET',
],
[
64099735200, # utc_start 2032-03-28 02:00:00 (Sun)
64118484000, # utc_end 2032-10-31 02:00:00 (Sun)
64099738800, # local_start 2032-03-28 03:00:00 (Sun)
64118487600, # local_end 2032-10-31 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64118484000, # utc_start 2032-10-31 02:00:00 (Sun)
64131184800, # utc_end 2033-03-27 02:00:00 (Sun)
64118484000, # local_start 2032-10-31 02:00:00 (Sun)
64131184800, # local_end 2033-03-27 02:00:00 (Sun)
0,
0,
'WET',
],
[
64131184800, # utc_start 2033-03-27 02:00:00 (Sun)
64149933600, # utc_end 2033-10-30 02:00:00 (Sun)
64131188400, # local_start 2033-03-27 03:00:00 (Sun)
64149937200, # local_end 2033-10-30 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64149933600, # utc_start 2033-10-30 02:00:00 (Sun)
64162634400, # utc_end 2034-03-26 02:00:00 (Sun)
64149933600, # local_start 2033-10-30 02:00:00 (Sun)
64162634400, # local_end 2034-03-26 02:00:00 (Sun)
0,
0,
'WET',
],
[
64162634400, # utc_start 2034-03-26 02:00:00 (Sun)
64181383200, # utc_end 2034-10-29 02:00:00 (Sun)
64162638000, # local_start 2034-03-26 03:00:00 (Sun)
64181386800, # local_end 2034-10-29 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64181383200, # utc_start 2034-10-29 02:00:00 (Sun)
64194084000, # utc_end 2035-03-25 02:00:00 (Sun)
64181383200, # local_start 2034-10-29 02:00:00 (Sun)
64194084000, # local_end 2035-03-25 02:00:00 (Sun)
0,
0,
'WET',
],
[
64194084000, # utc_start 2035-03-25 02:00:00 (Sun)
64212832800, # utc_end 2035-10-28 02:00:00 (Sun)
64194087600, # local_start 2035-03-25 03:00:00 (Sun)
64212836400, # local_end 2035-10-28 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64212832800, # utc_start 2035-10-28 02:00:00 (Sun)
64226138400, # utc_end 2036-03-30 02:00:00 (Sun)
64212832800, # local_start 2035-10-28 02:00:00 (Sun)
64226138400, # local_end 2036-03-30 02:00:00 (Sun)
0,
0,
'WET',
],
[
64226138400, # utc_start 2036-03-30 02:00:00 (Sun)
64243677600, # utc_end 2036-10-19 02:00:00 (Sun)
64226142000, # local_start 2036-03-30 03:00:00 (Sun)
64243681200, # local_end 2036-10-19 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64243677600, # utc_start 2036-10-19 02:00:00 (Sun)
64257588000, # utc_end 2037-03-29 02:00:00 (Sun)
64243677600, # local_start 2036-10-19 02:00:00 (Sun)
64257588000, # local_end 2037-03-29 02:00:00 (Sun)
0,
0,
'WET',
],
[
64257588000, # utc_start 2037-03-29 02:00:00 (Sun)
64275732000, # utc_end 2037-10-25 02:00:00 (Sun)
64257591600, # local_start 2037-03-29 03:00:00 (Sun)
64275735600, # local_end 2037-10-25 03:00:00 (Sun)
3600,
1,
'WEST',
],
[
64275732000, # utc_start 2037-10-25 02:00:00 (Sun)
64289037600, # utc_end 2038-03-28 02:00:00 (Sun)
64275732000, # local_start 2037-10-25 02:00:00 (Sun)
64289037600, # local_end 2038-03-28 02:00:00 (Sun)
0,
0,
'WET',
],
[
64289037600, # utc_start 2038-03-28 02:00:00 (Sun)
64307786400, # utc_end 2038-10-31 02:00:00 (Sun)
64289041200, # local_start 2038-03-28 03:00:00 (Sun)
64307790000, # local_end 2038-10-31 03:00:00 (Sun)
3600,
1,
'WEST',
],
];
sub olson_version {'2016a'}
sub has_dst_changes {49}
sub _max_year {2037}
sub _new_instance {
return shift->_init( @_, spans => $spans );
}
sub _last_offset { 0 }
my $last_observance = bless( {
'format' => 'WE%sT',
'gmtoff' => '0:00',
'local_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 725006,
'local_rd_secs' => 82800,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 725006,
'utc_rd_secs' => 82800,
'utc_year' => 1986
}, 'DateTime' ),
'offset_from_std' => 0,
'offset_from_utc' => 0,
'until' => [],
'utc_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 725006,
'local_rd_secs' => 82800,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 725006,
'utc_rd_secs' => 82800,
'utc_year' => 1986
}, 'DateTime' )
}, 'DateTime::TimeZone::OlsonDB::Observance' )
;
sub _last_observance { $last_observance }
my $rules = [
bless( {
'at' => '3:00',
'from' => '2013',
'in' => 'Oct',
'letter' => '',
'name' => 'Morocco',
'offset_from_std' => 0,
'on' => 'lastSun',
'save' => '0',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' ),
bless( {
'at' => '2:00',
'from' => '2026',
'in' => 'Mar',
'letter' => 'S',
'name' => 'Morocco',
'offset_from_std' => 3600,
'on' => 'lastSun',
'save' => '1:00',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' )
]
;
sub _rules { $rules }
1;
| 26.619704 | 92 | 0.630445 |
73f0aeaaf3c8ec0422407cd0c0e50f1233330993
| 7,355 |
t
|
Perl
|
classes/objects/bullet.t
|
DropDemBits/turing-3l-tankz
|
1da593ff8612299011339c9f316149489a097bd1
|
[
"MIT"
] | null | null | null |
classes/objects/bullet.t
|
DropDemBits/turing-3l-tankz
|
1da593ff8612299011339c9f316149489a097bd1
|
[
"MIT"
] | null | null | null |
classes/objects/bullet.t
|
DropDemBits/turing-3l-tankz
|
1da593ff8612299011339c9f316149489a097bd1
|
[
"MIT"
] | null | null | null |
% Base class for all of the bullets
unit
class BulletObject
inherit Object in "object.t"
import PlayerObject in "player.t"
export setOwner, getOwner, canKillOwner
const RADIUS : real := 5
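% BULLET_BOX holds the four corners of the bullet's square collision polygon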
const BULLET_BOX : array 1 .. 4, 1 .. 2 of real := init (
-RADIUS, -RADIUS,
+RADIUS, -RADIUS,
+RADIUS, +RADIUS,
-RADIUS, +RADIUS,
)
% Current lifespan of the bullet. Will live for 15 seconds
var lifespan : real := 15000
% Last collision checks done by this bullet
var lastCollideChecks_ : int := 0
% Number of collisions bullet has gone through
var collisionCount : int := 0
% Owning player of this bullet
var owner_ : ^PlayerObject
% Whether the bullet can kill its owner. It can't initially
var canKillOwner_ : boolean := false
/**
* Sets the owner of this bullet
*/
proc setOwner (owner__ : ^PlayerObject)
owner_ := owner__
end setOwner
/**
* Gets the owner of this bullet
*/
fcn getOwner () : ^PlayerObject
result owner_
end getOwner
/**
* If this bullet can kill its owner
*/
fcn canKillOwner () : boolean
result canKillOwner_
end canKillOwner
body proc onInitObj
% Set up speed: 2 pixels per update, expressed in tile units
speed := 2 / Level.TILE_SIZE
% Setup bounding boxes
for i : 1 .. upper (BULLET_BOX, 1)
objectBox (i, 1) := BULLET_BOX (i, 1)
objectBox (i, 2) := BULLET_BOX (i, 2)
end for
objectAABB (1, 1) := BULLET_BOX (1, 1)
objectAABB (1, 2) := BULLET_BOX (1, 2)
objectAABB (2, 1) := BULLET_BOX (3, 1)
objectAABB (2, 2) := BULLET_BOX (3, 2)
end onInitObj
body proc update
% Reduce the lifespan
lifespan -= elapsed
if lifespan < 0 then
setDead ()
% Bullet is now dead, don't do anything
return
end if
% Update position
posX += speed * cosd (angle)
posY += speed * sind (angle)
% If we're performing the poof, don't do anything
if lifespan < 250 then
isDead_ := true
return
end if
% Update owner kill status
if sqrt ((posX - owner_ -> posX) ** 2 + (posY - owner_ -> posY) ** 2) > 0.5
or lifespan < 10000 then
% Allow the bullet to kill its owner after moving half a tile away, or
% after existing for 5 seconds (initial lifespan is 15000 ms)
canKillOwner_ := true
end if
% Check for any collisions
var atTX, atTY : int
atTX := round (posX - 0.5)
atTY := round (posY - 0.5)
var tileEdges : int := 0
var tileOffX : int := 0
var tileOffY : int := 0
tileEdges := level -> getEdges (atTX + tileOffX, atTY + tileOffY)
% Only check for collision if the tile has edges
locate (1, 1)
if tileEdges not= -1 then
var hasCollided : boolean := false
% Test for collision against all edges
for edge : 0 .. 3
% Test only if the edge exists,
% Perform coarse collision detection
if (tileEdges & (1 shl edge)) not= 0
and isColliding (atTX + tileOffX, atTY + tileOffY, edge, objectBox) then
% Reverse our movements
posX -= speed * cosd (angle)
posY -= speed * sind (angle)
% Investigate the collision point further
var stepSpeed : real := speed / 10
for steps : 1 .. 10
% Keep advancing the position until we hit the collision point
exit when isColliding (atTX + tileOffX, atTY + tileOffY, edge, objectBox)
posX += stepSpeed * cosd (angle)
posY += stepSpeed * sind (angle)
end for
% Collision detected, reflect the angle
% Angle will be reflected by breaking it down into the
% respective x and y components, flipping the sign
% as appropriate, and converting it back into an angle.
var amtX, amtY : real
% Flip the appropriate sign
case edge of
label Level.DIR_RIGHT, Level.DIR_LEFT: amtX := -cosd (angle) amtY := +sind (angle)
label Level.DIR_UP, Level.DIR_DOWN: amtX := +cosd (angle) amtY := -sind (angle)
end case
% Convert back into an angle
angle := atan2d (amtY, amtX)
% Displace self out of wall
posX += speed * cosd (angle) * 0.5
posY += speed * sind (angle) * 0.5
if collisionCount > 2 then
% Force out the bullet in a perpendicular direction
posX += speed * cosd (angle + 90) * 2 * (collisionCount / 10)
posY += speed * sind (angle + 90) * 2 * (collisionCount / 10)
end if
% Definitely collided
hasCollided |= true
end if
% Check done for this edge
end for
% Update continuous collision count
% If we have just collided, the collision counter is incremented
% Otherwise, it is reset to 0
if hasCollided then
collisionCount += 1
else
collisionCount := 0
end if
end if
end update
body proc render
var effX, effY : real
effX := offX + (posX + speed * cosd (angle) * partialTicks) * Level.TILE_SIZE
effY := offY + (posY + speed * sind (angle) * partialTicks) * Level.TILE_SIZE
if lifespan < 250 then
% Less than 0.25 seconds left, perform poof
var poofPercent : real := 1 - (0.75 - (lifespan / 250) ** 2)
var poofRad : int := round (5 * abs(poofPercent))
drawfilloval (round (effX), round (effY), poofRad, poofRad, 24)
else
% Draw normal bullet
drawfilloval (round (effX), round (effY), 6, 6, owner_ -> base_colour)
drawfilloval (round (effX), round (effY), 5, 5, black)
end if
if offX + posX * Level.TILE_SIZE < 0 or offX + posX * Level.TILE_SIZE > maxx + RADIUS
or offY + posY * Level.TILE_SIZE < 0 or offY + posY * Level.TILE_SIZE > maxy + RADIUS then
% Bullet is outside of screen, kill immediately
setDead ()
end if
end render
end BulletObject
| 36.959799 | 107 | 0.481849 |
ed6ee0709cef5e86bbc422d8e0cc5aee7dbfa461
| 1,896 |
pm
|
Perl
|
network/dell/os10/snmp/plugin.pm
|
xdrive05/centreon-plugins
|
8227ba680fdfd2bb0d8a806ea61ec1611c2779dc
|
[
"Apache-2.0"
] | 1 |
2021-03-16T22:20:32.000Z
|
2021-03-16T22:20:32.000Z
|
network/dell/os10/snmp/plugin.pm
|
xdrive05/centreon-plugins
|
8227ba680fdfd2bb0d8a806ea61ec1611c2779dc
|
[
"Apache-2.0"
] | null | null | null |
network/dell/os10/snmp/plugin.pm
|
xdrive05/centreon-plugins
|
8227ba680fdfd2bb0d8a806ea61ec1611c2779dc
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2020 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::dell::os10::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$self->{version} = '1.0';
%{$self->{modes}} = (
'cpu' => 'snmp_standard::mode::cpu',
'disk-usage' => 'snmp_standard::mode::diskusage',
'hardware' => 'network::dell::os10::snmp::mode::hardware',
'inodes' => 'snmp_standard::mode::inodes',
'interfaces' => 'snmp_standard::mode::interfaces',
'load' => 'snmp_standard::mode::loadaverage',
'list-interfaces' => 'snmp_standard::mode::listinterfaces',
'memory' => 'snmp_standard::mode::memory',
'swap' => 'snmp_standard::mode::swap',
'uptime' => 'snmp_standard::mode::uptime'
);
return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Dell OS10 Operating System in SNMP. It's based on Linux.
The legacy version is OS9, FTOS or Force10 operating system.
=cut
| 32.135593 | 74 | 0.650316 |
ed5770936da83bd9f7ef102cb9c58030b96075b3
| 1,826 |
pm
|
Perl
|
openkore-master/src/Network/Send/kRO/RagexeRE_2009_12_08a.pm
|
phuchduong/ro_restart_bot
|
41da6e1def82d05341433961ca0f071ad4424b60
|
[
"Apache-2.0"
] | null | null | null |
openkore-master/src/Network/Send/kRO/RagexeRE_2009_12_08a.pm
|
phuchduong/ro_restart_bot
|
41da6e1def82d05341433961ca0f071ad4424b60
|
[
"Apache-2.0"
] | null | null | null |
openkore-master/src/Network/Send/kRO/RagexeRE_2009_12_08a.pm
|
phuchduong/ro_restart_bot
|
41da6e1def82d05341433961ca0f071ad4424b60
|
[
"Apache-2.0"
] | null | null | null |
#########################################################################
# OpenKore - Packet sending
# This module contains functions for sending packets to the server.
#
# This software is open source, licensed under the GNU General Public
# License, version 2.
# Basically, this means that you're allowed to modify and distribute
# this software. However, if you distribute modified versions, you MUST
# also distribute the source code.
# See http://www.gnu.org/licenses/gpl.html for the full license.
#
# $Revision: 6687 $
# $Id: kRO.pm 6687 2009-04-19 19:04:25Z technologyguild $
########################################################################
# Korea (kRO)
# The majority of private servers use eAthena; this is a clone of kRO
package Network::Send::kRO::RagexeRE_2009_12_08a;
use strict;
use base qw(Network::Send::kRO::RagexeRE_2009_11_24a);
use Log qw(debug);
sub new {
my ($class) = @_;
my $self = $class->SUPER::new(@_);
my %packets = (
'0134' => undef,
'0801' => ['buy_bulk_vender', 'x2 a4 a4 a*', [qw(venderID venderCID itemInfo)]],
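# pack template assumption: 'x2' skips the 2-byte packet-length field,
# the two 'a4' fields are the 4-byte vender IDs, 'a*' the item info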
);
$self->{packet_list}{$_} = $packets{$_} for keys %packets;
my %handlers = qw(
buy_bulk_vender 0801
);
$self->{packet_lut}{$_} = $handlers{$_} for keys %handlers;
$self;
}
# TODO: exact location packet?
# 0x0801,-1,purchasereq,2:4:8:12
=pod
0008
4e00
a8b33000 venderid
a1000000 161 (venderCID?)
b80b0000640202000262020100000000000000000000
80a81201010003000447090100090000000000000000
80a81201010004000447090100090000000000000000
0108
1000
a8b33000 venderid
a1000000 161 (venderCID?)
0100
0200
0008
2200
6a5b0c00 venderid
a7000000 167 (venderCID?)
200b2000280002000268020100000000000000000000
=cut
=pod
//2009-12-08aRagexeRE
0x0800,-1
0x0801,-1,purchasereq,2:4:8:12
=cut
1;
| 24.026316 | 82 | 0.653888 |
ed2bcb24ceb454dcf23a0070c8b5a3ba33e00a2e
| 750 |
pm
|
Perl
|
lib/MusicBrainz/Server/Entity/URL/Spotify.pm
|
monicaq21/musicbrainz-server
|
a570d0ed9aaef0db4fe063488e5a016861f6eb10
|
[
"BSD-2-Clause"
] | 2 |
2019-03-14T05:31:35.000Z
|
2019-03-14T05:32:01.000Z
|
lib/MusicBrainz/Server/Entity/URL/Spotify.pm
|
monicaq21/musicbrainz-server
|
a570d0ed9aaef0db4fe063488e5a016861f6eb10
|
[
"BSD-2-Clause"
] | 2 |
2021-05-12T00:15:55.000Z
|
2022-02-14T04:56:24.000Z
|
lib/MusicBrainz/Server/Entity/URL/Spotify.pm
|
monicaq21/musicbrainz-server
|
a570d0ed9aaef0db4fe063488e5a016861f6eb10
|
[
"BSD-2-Clause"
] | null | null | null |
package MusicBrainz::Server::Entity::URL::Spotify;
use Moose;
use MusicBrainz::Server::Translation qw( l );
extends 'MusicBrainz::Server::Entity::URL';
with 'MusicBrainz::Server::Entity::URL::Sidebar';
sub sidebar_name {
my $self = shift;
if ($self->url =~ m{^(?:https?:)?//(?:[^/]+\.)?spotify\.com/user/[^/?&#]+/?}i) {
return l('Playlists at Spotify');
} else {
return l('Stream at Spotify');
}
};
__PACKAGE__->meta->make_immutable;
no Moose;
1;
=head1 COPYRIGHT AND LICENSE
Copyright (C) 2013 MetaBrainz Foundation
This file is part of MusicBrainz, the open internet music database,
and is licensed under the GPL version 2, or (at your option) any
later version: http://www.gnu.org/licenses/gpl-2.0.txt
=cut
| 22.727273 | 82 | 0.669333 |
ed04e77221e3a7623be8a61175edd2880e0ea20a
| 5,393 |
pl
|
Perl
|
bin/Add_to_FULL.pl
|
vallurumk/vast-tools
|
0b34a1b3f2f9b8fefb603204b13852f09799de63
|
[
"MIT"
] | 1 |
2019-11-29T12:01:08.000Z
|
2019-11-29T12:01:08.000Z
|
bin/Add_to_FULL.pl
|
sbajew/vast-tools
|
5bac6f7bd9f10253ebd8d75ca1b678c1e2fb51bc
|
[
"MIT"
] | null | null | null |
bin/Add_to_FULL.pl
|
sbajew/vast-tools
|
5bac6f7bd9f10253ebd8d75ca1b678c1e2fb51bc
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env perl
#
# Final "Add_to_*" script that combines all previous AS event type-specific PSI
# tables into one final ("FULL") table.
#
# Event IDs are also converted to the "new" IDs, which are specified in the
# library file "New_ID-*.txt.gz" in VASTDB/FILES/ directory
use strict;
use warnings;
use FindBin;
use lib "$FindBin::Bin/../lib";
use FuncBasics qw(:all);
use Getopt::Long;
my $dbDir;
my $sp;
my $samLen;
my $verboseFlag;
GetOptions("sp=s" => \$sp, "dbDir=s" => \$dbDir, "len=i" => \$samLen,
"verbose=i" => \$verboseFlag);
###############################################################################
our $EXIT_STATUS = 0;
sub verbPrint {
my $verbMsg = shift;
if($verboseFlag) {
chomp($verbMsg);
print STDERR "[vast combine convert]: $verbMsg\n";
}
}
sub errPrint {
my $errMsg = shift;
print STDERR "[vast combine error]: $errMsg\n";
$EXIT_STATUS++;
}
sub errPrintDie {
my $errMsg = shift;
errPrint $errMsg;
exit $EXIT_STATUS if ($EXIT_STATUS != 0);
}
sub simplifyComplex {
# Ad hoc routine to simplify COMPLEX types
# (should eventually be simplified in the template source files)
my $type = shift;
$type =~ s/\*//;
if ($type =~ /^ME\(.*\)$/) {
$type = "C3";
}
elsif ($type =~ /MIC/) {
$type = "MIC";
}
elsif ($type =~ /A\_/){
$type = "ANN";
}
return $type;
}
sub reorderColumns {
# Re-order columns if input files don't have the same sample ordering
my $columns = shift;
my $refOrder = shift;
my @newOrder = (0) x keys %{$refOrder};
for (my $i = 0; $i < @{$columns}; $i++) {
# Iterate through each column, find out it's actual position based on the
# original sample ordering from previous input file
my $pos = $refOrder->{$columns->[$i]};
$newOrder[$pos] = $i;
}
return @newOrder;
}
###############################################################################
# Load the ID conversion table file into memory
my $NEWID = openFileHandle("$dbDir/FILES/New_ID-$sp.txt.gz");
my %newIDs;
while (<$NEWID>) {
chomp;
my @l = split("\t");
# Removed in V2 (18/01/18): IDs repeated due to assembly conversion are now overwritten
# if (defined $newIDs{$l[1]}) {
# die "Non-unique key value pair in $NEWID!\n";
# }
# else {
# $newIDs{$l[1]} = $l[0]; # old_ID => new_ID
# }
$newIDs{$l[1]} = $l[0]; # old_ID => new_ID
# to correct a small discordance in old human/mouse IDs --MI [23/12/15]
if ($l[0] =~ /INT/ && $l[1] =~ /^\-/){
my $temp_ID = "NA".$l[1];
$newIDs{$temp_ID} = $l[0];
}
}
close $NEWID;
# Loads the template
my $TEMPLATE = openFileHandle("$dbDir/TEMPLATES/$sp.FULL.Template.txt.gz");
my $h = <$TEMPLATE>;
chomp $h;
my @header = split(/\t/, $h);
@header = @header[0..5];
my %template;
while (<$TEMPLATE>){
chomp;
my @l = split(/\t/);
# Requirement removed in V2 (if repeated, overwrites)
# if (defined $template{$l[1]}) {
# die "Non-unique key value pair in $TEMPLATE!\n";
# }
@l = @l[0..5]; # to make sure unexpected extras are not included --MI [30/12/15] (old: \@l)
$template{$l[1]} = \@l;
# to correct a small discordance in old human/mouse IDs --MI [23/12/15]
if ($l[5] =~ /IR/ && $l[1] =~ /^\-/){
my $temp_ID = "NA".$l[1];
$template{$temp_ID} = \@l;
}
}
close $TEMPLATE;
# Load input data
my %done;
my $sawHeader = 0;
my @samples;
my $headerCount = 0; # count number of columns
my %headerOrder; # store order of samples
my @newOrder; # used for fixing out-of-order headers
my %seen_sample; # keeps the samples that were seen in the first file; if not matched in the rest, die
while (<STDIN>) {
chomp;
my @l = split(/\t/);
my @sampleCols = @l[6..$#l];
# Check headers
if (/^GENE\tEVENT/) {
$headerCount++;
#### Check that samples are the exact ones, irrespective of the order (31/01/18 --MI)
foreach my $temp_sample (@sampleCols){
if ($headerCount == 1){ # first table
$seen_sample{$temp_sample}=1;
}
else {
errPrintDie("Sample $temp_sample is not in all tables\n") if (!defined $seen_sample{$temp_sample});
}
}
if (!$sawHeader) {
push @header, @sampleCols;
$sawHeader = @l; # store number of expected columns
print STDOUT join("\t", @header) . "\n";
for (my $i=0; $i < @sampleCols; $i++) {
$headerOrder{$sampleCols[$i]} = $i;
push @newOrder, $i;
}
@samples = @sampleCols;
} elsif ($sawHeader != @l) {
die "Number of columns in subsequent header of input file $headerCount" .
" does not match. Terminating!\n";
} elsif (!(@samples ~~ @sampleCols)) {
print STDERR "Inconsistent ordering of samples in input file $headerCount!" .
" Re-ordering columns.\n";
@newOrder = reorderColumns(\@sampleCols, \%headerOrder);
} else {
@newOrder = sort {$a <=> $b} values %headerOrder;
}
next;
}
# Check if input is found in template
if ($template{$l[1]}) {
my @prefix = @{$template{$l[1]}};
my $eventType = $prefix[5];
if ($newIDs{$prefix[1]}) {
$prefix[1] = $newIDs{$prefix[1]};
$prefix[5] = simplifyComplex($prefix[5]); # simplify complex codes
print STDOUT join("\t", (@prefix, @sampleCols[@newOrder])) . "\n"
unless $done{$eventType}{$l[2]};
$done{$eventType}{$l[2]} = 1;
}
}
}
| 26.179612 | 109 | 0.569813 |
ed75046922f1236034113e5e8d6795e9f1ea8b5a
| 26,970 |
pl
|
Perl
|
silk-src/src/rwflowpack/tests/rwflowpack-pack-silk-send.pl
|
mjschultz/netsa-pkg
|
07bf4ff29a73ebc0f58e4aa27d3ad6b1dee7fc83
|
[
"Apache-2.0"
] | 3 |
2018-06-01T06:55:14.000Z
|
2021-11-14T22:51:04.000Z
|
silk-src/src/rwflowpack/tests/rwflowpack-pack-silk-send.pl
|
mjschultz/netsa-pkg
|
07bf4ff29a73ebc0f58e4aa27d3ad6b1dee7fc83
|
[
"Apache-2.0"
] | 3 |
2017-07-02T17:03:34.000Z
|
2021-09-09T17:05:31.000Z
|
silk-src/src/rwflowpack/tests/rwflowpack-pack-silk-send.pl
|
mjschultz/netsa-pkg
|
07bf4ff29a73ebc0f58e4aa27d3ad6b1dee7fc83
|
[
"Apache-2.0"
] | 4 |
2017-08-14T15:42:31.000Z
|
2022-01-24T16:24:27.000Z
|
#! /usr/bin/perl -w
#
#
# RCSIDENT("$SiLK: rwflowpack-pack-silk-send.pl 40a363507ed0 2014-04-01 14:09:52Z mthomas $")
use strict;
use SiLKTests;
use File::Find;
my $rwflowpack = check_silk_app('rwflowpack');
# find the apps we need. this will exit 77 if they're not available
my $rwuniq = check_silk_app('rwuniq');
# find the data files we use as sources, or exit 77
my %file;
$file{data} = get_data_or_exit77('data');
# prefix any existing PYTHONPATH with the proper directories
check_python_bin();
# set the environment variables required for rwflowpack to find its
# packing logic plug-in
add_plugin_dirs('/site/twoway');
# Skip this test if we cannot load the packing logic
check_exit_status("$rwflowpack --sensor-conf=$srcdir/tests/sensor77.conf"
." --verify-sensor-conf")
or skip_test("Cannot load packing logic");
# create our tempdir
my $tmpdir = make_tempdir();
# Generate the sensor.conf file
my $sensor_conf = "$tmpdir/sensor-templ.conf";
make_packer_sensor_conf($sensor_conf, 'silk', 0, 'polldir');
# the command that wraps rwflowpack
my $cmd = join " ", ("$SiLKTests::PYTHON $srcdir/tests/rwflowpack-daemon.py",
($ENV{SK_TESTS_VERBOSE} ? "--verbose" : ()),
($ENV{SK_TESTS_LOG_DEBUG} ? "--log-level=debug" : ()),
"--sensor-conf=$sensor_conf",
"--output-mode=sending",
"--copy $file{data}:incoming",
"--limit=501876",
"--basedir=$tmpdir",
"--flush-timeout=5",
"--",
"--pack-interfaces",
"--polling-interval=5",
"--file-cache-size=8",
);
# run it and check the MD5 hash of its output
check_md5_output('a78a286719574389a972724d761c931e', $cmd);
# the following directories should be empty
verify_empty_dirs($tmpdir, qw(error incoming incremental root));
# In sending mode, files are named with a mkstemp()-type extension.
# We must find all files having the same name minus this extension,
# and combine them before checking the output.
# path to the data directory
my $sender_dir = "$tmpdir/sender";
die "ERROR: Missing data directory '$sender_dir'\n"
unless -d $sender_dir;
# number of files representing a unique {flowtype,sensor,hour} tuple
# to find; i.e., where we have mapped out-S0_20090212.00.LELcDc and
# out-S0_20090212.00.PzEA5g into one entry
my $expected_count = 0;
my $file_count = 0;
# read in the MD5s for every {flowtype,sensor,hour} we expect to find.
# Read these from the bottom of this file.
my %md5_map;
while (my $line = <DATA>) {
my ($md5, $path) = split " ", $line;
$md5_map{$path} = $md5;
++$expected_count;
}
close DATA;
my %file_list = ();
# find the files in the data directory
File::Find::find({wanted => \&find_files, no_chdir => 1}, $sender_dir);
# combine the files and check the output
while (my ($basename, $files) = each %file_list) {
$cmd = ("$rwuniq --fields=1-5,type,in,out"
." --values=records,packets,sTime-Earliest,eTime-Latest"
." --ipv6-policy=ignore --sort-output"
.$files);
check_md5_output($md5_map{$basename}, $cmd);
++$file_count;
}
# did we find all our files?
if ($file_count != $expected_count) {
die "ERROR: Found $file_count files types; expected $expected_count\n";
}
# successful!
exit 0;
# this is called by File::Find::find. The full path to the file is in
# the $_ variable
sub find_files
{
# skip anything that is not a file
return unless -f $_;
my $path = $_;
# set $_ to just be the file basename, stripping the mkstemp
# extension
s,^.*/,,;
s,([^.]+\.\d\d)\..+$,$1,;
die "ERROR: Unexpected file $path\n"
unless $md5_map{$_};
$file_list{$_} .= " $path";
}
__DATA__
77f29ff118ec8f8d9854720aad1b5daf in-S0_20090212.00
6d3cb955aec81e58a517f55b18e39b39 in-S0_20090212.01
30fe140b75b349dc60108a15b0f09089 in-S0_20090212.02
e28a14cb50bc5105fef43d72db564748 in-S0_20090212.03
758e6eba47178e0d663d87666d55ed7c in-S0_20090212.04
e471c4b70280b88898ae222e2776073d in-S0_20090212.05
ad0e090687a78c0e1639da01bee6559f in-S0_20090212.06
f6595994e94b9ba804b0052fcb071d11 in-S0_20090212.07
e94c6132cdfd8f8e4ccb9afe938a34f9 in-S0_20090212.08
3a82ad1281818a43d5bbfdbc8bca27f8 in-S0_20090212.09
9651d0c506bc6e80aca888695d890d2b in-S0_20090212.10
4105ee1cf6b3d27899507c42639d9110 in-S0_20090212.11
3e3d9d547d3e9423991db159f6c737e2 in-S0_20090212.12
b083113d6492ea0b2b161a4a77ab7128 in-S0_20090212.13
6f9a9a7d6f5901856bc479b6885695cf in-S0_20090212.14
fa31379238c26ef9e1c68e4c13e05bf5 in-S0_20090212.15
dcd810086cbd7f91cb910f2dca64ad54 in-S0_20090212.16
2de393639f0aa77fefcaa6ffdc36a111 in-S0_20090212.17
00f87d5bad430a19e71ff1d166b64ad8 in-S0_20090212.18
e029a30cac3f61836caea418835b4683 in-S0_20090212.19
50cdebfffb888c3d41e0d3237ee97b9e in-S0_20090212.20
a8637137925ab10def285aa23f4d55ff in-S0_20090212.21
54327825923dc3b4f6d4167aa8c14625 in-S0_20090212.22
c5f4f2137a9e78c1451e11ad3a9e8f94 in-S0_20090212.23
e0a1613e402585ecd1b631b95ff647bb in-S0_20090213.00
0450bd7fc62a0eda7aef08187a54f3ce in-S0_20090213.01
236f8eb9d2347790d594e113de25a43e in-S0_20090213.02
8c824fbf3ad12a7cfdbec037bdb96e7c in-S0_20090213.03
e5287bd2c7198903a3806a0e7413cf5f in-S0_20090213.04
0f4a16d745e5e44e03a5afcc401a1ad8 in-S0_20090213.05
6d557cef4f14df43518300674ef817f0 in-S0_20090213.06
57e7a502bd456324d79962edde8bec34 in-S0_20090213.07
8eb585550fd3abb6364515072ee96830 in-S0_20090213.08
b853cdcea1ee93a86b9ebb4b2f2356bf in-S0_20090213.09
474a10aebe3ce4866cc38b5de6383189 in-S0_20090213.10
60d7969cadb20b871ef44878fa4053ca in-S0_20090213.11
763782f13e2c3129878278091979d225 in-S0_20090213.12
89d4d9668d183669b46040f046ab5f25 in-S0_20090213.13
7c34ecdafbfff97b039674e770c50229 in-S0_20090213.14
f2158ed69384057a4895f74c8ebfae3b in-S0_20090213.15
1800716a23083210d34e16c94162739f in-S0_20090213.16
da0bdf85b18f6ce1ab82a0e96861280b in-S0_20090213.17
321f44dc69fd6991408e35fc7731fe29 in-S0_20090213.18
fbc45dd1516208ea0a03846efc66f280 in-S0_20090213.19
a288a8e4438fb4ca1f45b7f24721c4c9 in-S0_20090213.20
35283133191f723784d272b91afb9db0 in-S0_20090213.21
9a138efa5a012330934453093221f8a2 in-S0_20090213.22
8d464cb3dc12857c5ed3d14a20a8481e in-S0_20090213.23
a9d9bc9af15c699176dffb00575f2170 in-S0_20090214.00
f3ad3b47a2b31406781a55f566af9af1 in-S0_20090214.01
c1be6f756657ac104c408e3303eb3e11 in-S0_20090214.02
f941437ced4d3e9b12f392b2981cce8b in-S0_20090214.03
3890f79f06e9a0e33226e4906e568798 in-S0_20090214.04
ba335d09f4c2b521f2260dff998a359a in-S0_20090214.05
22749dd93ee0eb34603df79334016623 in-S0_20090214.06
a17a501750f5fe12e5808789d36302f8 in-S0_20090214.07
6d98fa4a19e823ab0d58c10cbc11a376 in-S0_20090214.08
c96ac8de3410215118c6a253deff4745 in-S0_20090214.09
3a49909d13f6fa9a1de767145dbfd993 in-S0_20090214.10
28ac0b60a602aef649881fa763df0699 in-S0_20090214.11
7e7d462719c2f66632bac257dac8596b in-S0_20090214.12
78a9f1548e3e68144ce80bea7e3ec2b3 in-S0_20090214.13
23a97fd944f3a76419f106097d8ec181 in-S0_20090214.14
90f6b518dd33cbeb91fae80dd10922b7 in-S0_20090214.15
1e349cec6a65fa6fe18d5be5dfa0d034 in-S0_20090214.16
79f94e633af33d40619cb40270b25fc2 in-S0_20090214.17
99b2301062258820b06207158323458b in-S0_20090214.18
f91d9637705b4eb12197f6a09dc6450b in-S0_20090214.19
6871518a672bf8e3b71851907b35dab7 in-S0_20090214.20
ea5484d7851443cc913d523d5d2c99bc in-S0_20090214.21
11961c234485ea15269e39c742033ea5 in-S0_20090214.22
00f7f8d46735d834ba615c928f5d31d5 in-S0_20090214.23
30c37e221bc059b08f381d430e620ad1 iw-S0_20090212.00
b7626bd8ec89159ba880457f0cdca93c iw-S0_20090212.01
47d96d9b641c71c1c101ac3e33cf11ca iw-S0_20090212.02
c8cee2d278ce67547439be7889a7d0d9 iw-S0_20090212.03
7c9cddad1d9aada59f8f309fadef37a2 iw-S0_20090212.04
8d176f3de55364d3fd46c38f90179449 iw-S0_20090212.05
ff20b6cedd0a88976d8e1a3f2de79baf iw-S0_20090212.06
207959c0e4f73308b2a1057bc77d6e31 iw-S0_20090212.07
c5a8050892b07c3d64a009afde67f0ea iw-S0_20090212.08
a7b98a95a5f123e7ea1bfe0d4beb8bbf iw-S0_20090212.09
bd0707b207f52bed95eb0f191c838424 iw-S0_20090212.10
8971bc8cdd6c51bddfb5b5999a261dcc iw-S0_20090212.11
d1dc5ce3e363c81a093a26b0401081ef iw-S0_20090212.12
f81e4ee54e84aaf2082d8d6255900b4d iw-S0_20090212.13
3fa3470afd70ca96b0aa6d41a22adaa5 iw-S0_20090212.14
755e446c31b606f3f52c5026890cdba4 iw-S0_20090212.15
5fe95145dc560e91f9b6c6dc414ff9ad iw-S0_20090212.16
bf9225a6f83914c41453d9613cc28b43 iw-S0_20090212.17
0ea0cf9b0002dd7a73f3df1890870398 iw-S0_20090212.18
e70c8260321ba89fc3e028afc14598c9 iw-S0_20090212.19
0d174aed45051a942bf0fc0086504631 iw-S0_20090212.20
c2b9001361f79564e4f336b8a7d2aefd iw-S0_20090212.21
3e028b1c170fbcd8768d1883a363d95f iw-S0_20090212.22
fc6fb30f684c818df029b1147af2e8a5 iw-S0_20090212.23
c2d637097f0fb09b967b925271492d6f iw-S0_20090213.00
d4fcb30041436c3420ca550e4f5c89df iw-S0_20090213.01
8422393ce60a42362598223af77755d9 iw-S0_20090213.02
339652d5523d4411e2ffc2e7f7d2d65f iw-S0_20090213.03
54810293bb4478c24199073edf6560a8 iw-S0_20090213.04
98f95ef0f0879c97abfde6099741876d iw-S0_20090213.05
30491ef7a14451d6a41b5339452806be iw-S0_20090213.06
9e8647b8ea7297215130d703b8d4eee9 iw-S0_20090213.07
12de42cab40083eda97247683047a71a iw-S0_20090213.08
86e8514dbfe139cb835374edc80350db iw-S0_20090213.09
2f917cc5cfc7053a71e55a2b4fd635fd iw-S0_20090213.10
5642b831cff267617e8a670ed312c02b iw-S0_20090213.11
4fd1717cab32737ee6da220b46db82d3 iw-S0_20090213.12
aaa04a84dc86511cc489b14c92bc4301 iw-S0_20090213.13
ebaa1efbbc5e981cfe9e57580a67ab86 iw-S0_20090213.14
5703b92975d021f8fc218531847ecad4 iw-S0_20090213.15
d246f1791f01743992740eae2f44703c iw-S0_20090213.16
047de40843d6b296952dbb327c6f6b05 iw-S0_20090213.17
aa81a049c1a639cafc080de9ab7383c9 iw-S0_20090213.18
2b8ed8f39d7b05934481b9b97dc7a949 iw-S0_20090213.19
a9798777e2273965b6572652616429e7 iw-S0_20090213.20
3b8f2e45bac1525e0e785b8d1261e862 iw-S0_20090213.21
b672f1f6f431b2718e3d2247477ffcb8 iw-S0_20090213.22
d61c0e5dd38e9eaec88841f2ad87d205 iw-S0_20090213.23
2da813ecfee7a2eae4a356d14df712bc iw-S0_20090214.00
f9c916b912399435a97a46c6fa0b048a iw-S0_20090214.01
1568e533147da68e84b3ea64f23be4a8 iw-S0_20090214.02
fe85bb6fbc1e9a9a831d59bd904dbc69 iw-S0_20090214.03
f640090a61eb33393d7e2ea2202aaf48 iw-S0_20090214.04
3463f34cb03369db10467d2170fe75d6 iw-S0_20090214.05
ee26ca573c50d171e09906a1bba50047 iw-S0_20090214.06
a550b21d335ad382361616746b409aae iw-S0_20090214.07
ccc1694c9ddf5f9faeeeaf8a6a994cb9 iw-S0_20090214.08
d7e76c712e1091a3769b7545768d7667 iw-S0_20090214.09
7bdb6861b3a983deab8542cb99febbf8 iw-S0_20090214.10
3724778dc228a3a1f9833e5cbdc762c1 iw-S0_20090214.11
ee10f74d0f2afa25e2b51ceab9e2b38c iw-S0_20090214.12
bd131fc0226e694d23040780c71f08b0 iw-S0_20090214.13
e319743b5835f200b31192e3863896b4 iw-S0_20090214.14
bda48057483ab821033dcb8d15592f7a iw-S0_20090214.15
d94c8a5fe25ac87f6c218d9ac58c5eab iw-S0_20090214.16
897394f9b7079d674c181356c6985988 iw-S0_20090214.17
de08d51a1c82d29dc17a9b71e2979466 iw-S0_20090214.18
e6b5fcfff87533842a3f4ef0f9a43802 iw-S0_20090214.19
1d45fd27964b6f076eb8fccaf9044aca iw-S0_20090214.20
c67b62de9e11467b5b11998655557a41 iw-S0_20090214.21
a8c40fd68529ed5bca5f6cfd6aabfc55 iw-S0_20090214.22
48417f7c827da89f6aa6548f12594466 iw-S0_20090214.23
f56fe3e2e6b199d5da39adcbb9017532 other-S0_20090212.00
f968f8389f1a1156d69e2709e771ecaf other-S0_20090212.01
f38befa04e57d7c24f8833da80b0d680 other-S0_20090212.02
667fb61ce623a3b33c73e9bfcc517919 other-S0_20090212.03
47974de3b2a494598b0f87608d65856a other-S0_20090212.04
12dcfc9b4641c32482916703db862899 other-S0_20090212.05
2c9b38cdeab5b5d078fff888b00a8d54 other-S0_20090212.06
d6f1a6eeafcca90e061c0ad266228524 other-S0_20090212.07
1e3c5edc5b19e4a98e298fd4afdd787d other-S0_20090212.08
87c67f76d41b4f97529b8f08a12769c4 other-S0_20090212.09
6467f66489c116d85b63a96b2d395fd7 other-S0_20090212.10
a5a9e42bdee8f6e463e4e0f43d8e4657 other-S0_20090212.11
a1d9a6c64bfbc9a1eb0c933d609e1ef4 other-S0_20090212.12
dcbf6677f9c3c4e44722c2c21f7fd535 other-S0_20090212.13
72fe4173f9d7443edbdab85803bb4627 other-S0_20090212.14
ef6d54ae6a3c2763d6d3e1ae7cb73053 other-S0_20090212.15
675b7590d59ed03065a1697de6b4437c other-S0_20090212.16
861d6831999c1f4f51758281895003bf other-S0_20090212.17
0cb6b54714fc2e0b6f44b229a3e28283 other-S0_20090212.18
4e0b9d53b2e85bcfc4bfcc10ce33cff2 other-S0_20090212.19
a78c2b9886fff12fa43af34b8b424f77 other-S0_20090212.20
812904d99090b6c145c904b82453b98e other-S0_20090212.21
a1aa90c87896adb00ec7b7b75bf33dc9 other-S0_20090212.22
925087f241c9e6aec5e5b5307d57d52a other-S0_20090212.23
238cf0be53ac1b15c6e79fd2c9af9e7a other-S0_20090213.00
a8e31a07430485e9ab56991ac6eb754d other-S0_20090213.01
76ec54b662b4007b1b0995043e43c082 other-S0_20090213.02
b75bc9cbe60968464eeda56bb0e8030a other-S0_20090213.03
27961637a4742a0b65fbbfb6753b269e other-S0_20090213.04
0f01c8891ee2f26a5a74b9060bcf3526 other-S0_20090213.05
df4369cdd51ff81d964a5274893b45b8 other-S0_20090213.06
9dbb3c0491ac452f0fcec61d60641337 other-S0_20090213.07
e93a565b6dff02e847decd4eb63c883f other-S0_20090213.08
33b4f1f8ce175959f1ea9c6b52e02b7b other-S0_20090213.09
f57ff177cac97e35430f48bfb340b9a0 other-S0_20090213.10
fe94ec57a41a838e74a27af5fec1a55c other-S0_20090213.11
535f9e099e0337bbcbf79cee3ba0cb34 other-S0_20090213.12
eca5d954cfa56f5931d59f78bc884f5b other-S0_20090213.13
1db20b8a010b418ad5c825697812b4df other-S0_20090213.14
17a48bf2e56d648747e7973d584109e8 other-S0_20090213.15
277ba6109da427987836d640d6375e4e other-S0_20090213.16
30dbfd71986159bbf1a758a2ef0584cc other-S0_20090213.17
7c9dc506b6ca15ffafeae9daf82535a5 other-S0_20090213.18
2191f24be75533ea61ad3e4fe716b3f8 other-S0_20090213.19
6de50bedf3779f83a1231d698e65b025 other-S0_20090213.20
eb492830d114b6a704dd90c4aa5fb618 other-S0_20090213.21
0187b323781282419c3c8306e5049f8a other-S0_20090213.22
527c0480f40e184b9a073df00e03418a other-S0_20090213.23
4728a656a3a9b76635e53c50d61c82bf other-S0_20090214.00
f252db400be3f9bd02a009f02013bb23 other-S0_20090214.01
846da2632cec636170795f2d41553af1 other-S0_20090214.02
5b4d45d3e56d729aed17cfeab5decb2c other-S0_20090214.03
5c201d47dedb46216604ea84966dd487 other-S0_20090214.04
d948c6d9210d039cf38d3a2bbce7274d other-S0_20090214.05
006c23e1a53fe1e245dd3a6b3627f82f other-S0_20090214.06
93d33fd6142892877eda09ff1ac8b9eb other-S0_20090214.07
ade18c7a1b3a54091b9bea0b791a03da other-S0_20090214.08
b02a617e16b3ca34cf4ab87b9c4294d7 other-S0_20090214.09
0e5478558046a3eca22f5668f253fd39 other-S0_20090214.10
299eea86a06e89bf99f63395c6dbc747 other-S0_20090214.11
d5aa3a5d9de20bd4b5221aae3a5da5f6 other-S0_20090214.12
87a487b613d8a3828037a6dcd84901fd other-S0_20090214.13
768d1d7f88d8cf647585fbaa4d8296bb other-S0_20090214.14
50616b16969740ab6b20e18afa89549a other-S0_20090214.15
52f8831ef43fd3f7ed33c26e6796d0cb other-S0_20090214.16
071989c0d99c48e3fd038e68e69f6f9e other-S0_20090214.17
97b7a7388e6a2229ece42b5104654780 other-S0_20090214.18
d180e64415fc5fd1d4cfe067dfac8cb8 other-S0_20090214.19
0d15419b7a996b49705fbad7d7b67fe6 other-S0_20090214.20
2f9c10796f87cfb5f852429740ae1da3 other-S0_20090214.21
39383ed3fb3d303d0765f43061984b6c other-S0_20090214.22
1e0b90ed2d612bfe0148acc52f14bb91 other-S0_20090214.23
c3149c11692cc939ff212a6708ce2c55 out-S0_20090212.00
e22bb11570e2264a38a55845a9c0de93 out-S0_20090212.01
b87a23a8e2c3f1e3a5bff37cf0a8bdf2 out-S0_20090212.02
34cea4decdf32b90460d47b5651b40bb out-S0_20090212.03
7be2458209fe3c6ab3db80c4a33e044e out-S0_20090212.04
8eb31497340a13469c751280afc966c4 out-S0_20090212.05
6227f71fdb39c0bd164b7ab6240754b9 out-S0_20090212.06
b057c5b2d2f5e0fd55b2da965e915406 out-S0_20090212.07
86d6d84b26440ab3783b9896039a9d49 out-S0_20090212.08
784f41a1b802e9fd8597c2c9883ba25e out-S0_20090212.09
0cd43f9ed0d41edf09b2f4459bbe5263 out-S0_20090212.10
8cab0e8e1fefaedb2b9b98c1f79c4c3c out-S0_20090212.11
485ea52c6f785aa8a4309880204e4c81 out-S0_20090212.12
08fbc05675f9dae71abd0578ce984cbb out-S0_20090212.13
7085389ef23c336f817653c2090a3c4c out-S0_20090212.14
c8656d1db1ccacaf322a37a4c30a8300 out-S0_20090212.15
55b64cf7f9ce00113cd1edfb47f39925 out-S0_20090212.16
624a5b8a60b3971bf05411a5f03c8a9d out-S0_20090212.17
2fa53a56a08a85931a86c94570788b0f out-S0_20090212.18
f67537eac5bdd4c9d329d482953eb23a out-S0_20090212.19
03d89a918f6121a33cc7a7ff73ead242 out-S0_20090212.20
ee33bd6aadecd5f17590265b5eeca93e out-S0_20090212.21
f0880deaddbfdf3b21d58eb16800d3ab out-S0_20090212.22
838c34af99fc83c4a755dfae53e3294e out-S0_20090212.23
da6aa31da4b5f524740da5792082f5e9 out-S0_20090213.00
4bcd970ca7033f02746c23a5fbf893a3 out-S0_20090213.01
6522331a57485d6a71c988f7e73832ef out-S0_20090213.02
4e354a6f661f7e0de11ab4ccdd5dfab2 out-S0_20090213.03
e3682ef6911e876a0482ebb31136ed54 out-S0_20090213.04
0815e8a1f4b2a293804c4cbe6b944e97 out-S0_20090213.05
694cfb6f06a432a2db075a6f053bd4e1 out-S0_20090213.06
b2449321dfb99fc2e3c768852ad23b53 out-S0_20090213.07
e700bbf1e385c2247aba9289e932d8c9 out-S0_20090213.08
403baa5e6988bfd078109cec8e836357 out-S0_20090213.09
4dac2003d70f7d549305b6d2707c45b8 out-S0_20090213.10
fae8b034d87a5e5c911f373aa7da7a58 out-S0_20090213.11
fb2566846f6c2feb8d5ad9f448a87497 out-S0_20090213.12
9a5c4e89f7b095da4bc4610556a27a89 out-S0_20090213.13
46dd9b8fa3a7949f1cf32b67c8cc5071 out-S0_20090213.14
a3d3a8317378221e43f11d45d42c4ad1 out-S0_20090213.15
7ded609e9b6fdb6a211bcd4c2fabd829 out-S0_20090213.16
5eb44f5f5a347e0b38d77ff349c806fa out-S0_20090213.17
82c33b0b96d561795ace3fe34f19d527 out-S0_20090213.18
7c45e83825af42ab50d67d5a3953d51f out-S0_20090213.19
523d9cb5ff627f6bed9c347aebc0e8bd out-S0_20090213.20
37b817e1915b7c47a3daa2e8e8eaff0d out-S0_20090213.21
9b95f30b15ecddf54f8df75f8925e56f out-S0_20090213.22
9c0122abcce77a465a19c1da642de696 out-S0_20090213.23
f79a81203b697a41343b55254824e93a out-S0_20090214.00
0ede1ffce53ee32eb915d190b8b6903d out-S0_20090214.01
1c8b3bd5f818685d48e4ec2772d4fbf5 out-S0_20090214.02
770f96f35c1149926cfa84deb52fff3a out-S0_20090214.03
3fd64ba5f7f5f005531310886ba3ab1c out-S0_20090214.04
c4b68a796eec471ed1ba584a9c9413b2 out-S0_20090214.05
4ab857a41a94234e9f1d847c7fcfde4e out-S0_20090214.06
cb5dde5c9a55fafc0575de4b6d0a3b91 out-S0_20090214.07
0c91d83321835b81d56abd32311c1839 out-S0_20090214.08
dd2849e6f1d97672b08342448b2a9d43 out-S0_20090214.09
b4d4760c77626d7aa2f26a1821cc9409 out-S0_20090214.10
465ea62b3a8b7e3165e1c59628d92483 out-S0_20090214.11
f377be8ea32c8edc7ec611bf2088f1b4 out-S0_20090214.12
04eac0f411954861e697edf01fdeaef1 out-S0_20090214.13
a5bb8f1dac1daad03b3607df7c775789 out-S0_20090214.14
c2c2dec76fcd4f3134b7437b6fecbda6 out-S0_20090214.15
baa746a4ccfda7ac13065b30f873a1c3 out-S0_20090214.16
25c7f1bd2f43a465a7cce4d43460d847 out-S0_20090214.17
93193931a67f6c271bd6f6b5bad28cfa out-S0_20090214.18
a7f42c92b070a30e3218c407942d9fb7 out-S0_20090214.19
fbf38e1cc2735ce95e4e2c198c12982a out-S0_20090214.20
68b2701b266badefe6a4ab270075c7bb out-S0_20090214.21
2540e454cdf4a7661a771754daa8cff7 out-S0_20090214.22
3140b93073575130bb33e5c4e2c96353 out-S0_20090214.23
fcc0358fedf6b76536b3e1fbf1c9659c outnull-S0_20090212.00
b2aa94264201fdaca5928e464e10abfc outnull-S0_20090212.01
6863cb5a3b5c68803471c34a714f1bf3 outnull-S0_20090212.02
83ca2b2a57831fdd8449024c195fe78b outnull-S0_20090212.03
2cbc3f96ff39cc3d735b15f5a311ca7b outnull-S0_20090212.04
9a6567a7b76596766c36e9edf7425fe3 outnull-S0_20090212.05
de3213456ad266a37cd9ca8d219b9f73 outnull-S0_20090212.06
fcc28a84ed3d4176080778f96815a625 outnull-S0_20090212.07
f294668970c57f2a1831f3f99180976e outnull-S0_20090212.08
c91745ecc0f13396b62b4acc8845420e outnull-S0_20090212.09
e862839ecc2b9749f6ba4fefd45df080 outnull-S0_20090212.10
7880b10278b01fa300fa9a63d2ca30cb outnull-S0_20090212.11
d53e07baab53273879672e91a1f22aa6 outnull-S0_20090212.12
7fdcf68a07ea1918b213fc5c2857506c outnull-S0_20090212.13
2e6bfe430bb4e5195918180a87fa6521 outnull-S0_20090212.14
031e1e3c409eab411a78af9cecce57ba outnull-S0_20090212.15
80d120f43fa5d2951429f2fa27535c95 outnull-S0_20090212.16
de4f2ce7aa5ba0cbdc9126cf75f9d351 outnull-S0_20090212.17
30d069d5068fb3e32a14b3e0cbb34e06 outnull-S0_20090212.18
dccd410f347f952debdab1b8bc131068 outnull-S0_20090212.19
b3bc1bd4366e0af34770e1b3f22e4a9d outnull-S0_20090212.20
646cdedfd0474dcd4c1d7663271fdc11 outnull-S0_20090212.21
c45a0d140fa3346145ce5e1fab5edc27 outnull-S0_20090212.22
761541aa20f0d9e09f5c75afc4127f0e outnull-S0_20090212.23
366f038f9ac645e5fac204b047e71240 outnull-S0_20090213.00
cb400eb1822f42397465ad39e2ffb98b outnull-S0_20090213.01
b556231bddd62a10d59b1afa4b80588f outnull-S0_20090213.02
3035f7c1b209a3f481857aead6a5eeb3 outnull-S0_20090213.03
d12c8ae68b66a66f1e6a7428afd5add8 outnull-S0_20090213.04
c735a4fde406db32a5d4753f018282f5 outnull-S0_20090213.05
0328a61ce99215de47136030dbb9b02a outnull-S0_20090213.06
edd4b06ecb241f691a0405c6fac3c562 outnull-S0_20090213.07
80160b05611682fefbc140dc9a2a3a8d outnull-S0_20090213.08
3efa0c7b8ee78a7e3882036d9e4d398e outnull-S0_20090213.09
6d31bdec7117cbac334b19f3dc5d270d outnull-S0_20090213.10
25a5bcdc9bc6a083ca73d6ce13cee042 outnull-S0_20090213.11
f3376725035a5102e3d119f4ab61785e outnull-S0_20090213.12
938eb99b8c18c40c01d2a6e263326226 outnull-S0_20090213.13
93eabd8c29299fd54472658a416edefc outnull-S0_20090213.14
5d044726176fdd80a4d7ac1198e08a15 outnull-S0_20090213.15
919c310feb6cd836e81fe4d15177b9c4 outnull-S0_20090213.16
3a8431cbf53011f27e88e06dff138445 outnull-S0_20090213.17
483f56e7009a5f1840f42ca20ca78cc4 outnull-S0_20090213.18
beba7b17dddd958c456569d016124ff8 outnull-S0_20090213.19
424501347bd65861991ed48d41308e7a outnull-S0_20090213.20
0e0cc98dc9644c7ba70085d0b4a9ae05 outnull-S0_20090213.21
0a89bb3a4894b8ca6104fb1b53cc9e07 outnull-S0_20090213.22
58be4ca4f4b671fdf1bbd600e4c2bcf6 outnull-S0_20090213.23
8389ef2b046c30effd4060c297bab678 outnull-S0_20090214.00
3909698c48635085bf43bc229b022fbd outnull-S0_20090214.01
1f370f19e5d8912155807e574a1374d4 outnull-S0_20090214.02
5ff43e98cd6ecc7a5ce041130ae04ecf outnull-S0_20090214.03
11d909a38d27ceec290ef0895fe9c0b8 outnull-S0_20090214.04
5e063f85782f5aafae2470b1bcdec8ee outnull-S0_20090214.05
e4c047ef227fbb5443ae1952cd3fddd2 outnull-S0_20090214.06
2cf5ea6ad1ee422b201d180d8160e341 outnull-S0_20090214.07
0ed0f03310113bcf3a52d1de57217633 outnull-S0_20090214.08
c2faf7ad1587acd7f20b415ee4bb8d0f outnull-S0_20090214.09
b1eaccdc2237ef8bc7836419b90d85ed outnull-S0_20090214.10
e1180b23aac831a6c5eeb9928052d42c outnull-S0_20090214.11
5e30a3129152d6392e9a022339388e5c outnull-S0_20090214.12
623e3938a9c24f30bb8e5b782bea6e79 outnull-S0_20090214.13
2493fe3049e63aa7114c541b1a7033de outnull-S0_20090214.14
f532a108abcc5efa11b5b1677819ff7f outnull-S0_20090214.15
8ff9a41510e5adb6d3b72b5456db6495 outnull-S0_20090214.16
a4b62debb48775a66a10af584907754f outnull-S0_20090214.17
b7cfa02ed2bd2bc2803191a2847e8cf0 outnull-S0_20090214.18
9cc2594fb361592f921e41b19c8442ca outnull-S0_20090214.19
21606dba70b3126253c96e5ffcbaba32 outnull-S0_20090214.20
a30efb49c148778bf0f5dd513185d104 outnull-S0_20090214.21
9ea97f7e0927eda9cb46031387c8bddb outnull-S0_20090214.22
ad99c8cad59b4cd7c7498f5176069ac4 outnull-S0_20090214.23
2a298daf79f12fbc398d697d4d7b17e3 ow-S0_20090212.00
8de1851e9345fbb77bb8d157d3716547 ow-S0_20090212.01
60b07338d2bce80c60ba20dbddf79157 ow-S0_20090212.02
b26a211fd0fa33dc6d3c6b27dcba051d ow-S0_20090212.03
ca3fa7f20cd9c7f746e06acfd8c552cf ow-S0_20090212.04
7b30238b5b3adeb9cfeab25d863f23df ow-S0_20090212.05
2d7e68ce3d252065fe8e8ddd4702028b ow-S0_20090212.06
c68e8179018e263984dc52c0304e69f6 ow-S0_20090212.07
f3602b85b4da206ebcd8dc0eeadb1b1a ow-S0_20090212.08
e8f2009ce4a6d8a2d28006dd99eb400f ow-S0_20090212.09
76175ff0a16b11e58cca09e776735fcc ow-S0_20090212.10
4be82aaba3f38e91dc3d8ba7f691a6c0 ow-S0_20090212.11
9632689e4d9dd6747cd7b3cfacfbbcbb ow-S0_20090212.12
f36655abbcb520d7e781b5a29f0c6778 ow-S0_20090212.13
ffed198a1c6dc337d39b992e43da3383 ow-S0_20090212.14
54b2fd72f1ac6a4df3baeb0e2b85a047 ow-S0_20090212.15
d4411fc476cc4c7fe804abb973dff91d ow-S0_20090212.16
8f5f4e44824a483f400f8af23f1df18d ow-S0_20090212.17
0a6f64b3080f45ab077d7edd551bf175 ow-S0_20090212.18
18a196e6cd8cd5b2711d025e0030c622 ow-S0_20090212.19
2c18983636f1921ab5acad9a6f560716 ow-S0_20090212.20
77937df6d25a28d2937107e5a3993f4e ow-S0_20090212.21
de20912d50be7d9ebffde6e75374a39e ow-S0_20090212.22
3b62659b655758af21b0d4f10726fbbf ow-S0_20090212.23
7c2c2f17d7f040b4f11806399d10c3fa ow-S0_20090213.00
db67cee28506ca84b4331dff53613bcf ow-S0_20090213.01
4a2180f2e5261207837cf4b0740dde66 ow-S0_20090213.02
2457c85ba5468033a5c5bbc3db401394 ow-S0_20090213.03
8f8ddc73dc72e5182957caed6c03244e ow-S0_20090213.04
9b7efef6f579c57e69eccce4c73dbbc3 ow-S0_20090213.05
75735450ea551e79b8967344ee9f9c47 ow-S0_20090213.06
3b518a490e6f8ec184d6c00a60ff02e9 ow-S0_20090213.07
4b8023b2372e6ea33bf7c02e75b112a5 ow-S0_20090213.08
bc8f88caa2ab8e86e41abf90b6850a02 ow-S0_20090213.09
2e58cec546c3820656cbba027c798e86 ow-S0_20090213.10
ee316626beda42b76516e43c1a6d4278 ow-S0_20090213.11
33777314081952815ceab7fd44f3dbb5 ow-S0_20090213.12
a6fb2d5efd8caa7ef02eba1392ee94cc ow-S0_20090213.13
15feeb1e22f7d253702c8c922e8ba2aa ow-S0_20090213.14
ef335f553782aed8387354d470e8b388 ow-S0_20090213.15
990c87ac0333b59a7a9cb286e7135396 ow-S0_20090213.16
72db300c6986fc6ac078168ab491a653 ow-S0_20090213.17
d4625142374250e8db905b55a4771782 ow-S0_20090213.18
ef7190c2e94de8bced1c59b96e51649d ow-S0_20090213.19
85d2803fd1ae362ba06d88f7466da76c ow-S0_20090213.20
ea8bcb8ec108ddf31bfff78cc125591c ow-S0_20090213.21
d1caa84ede160177be59afdfc72e2612 ow-S0_20090213.22
fabaca1408aa6a4204294e17ae07e730 ow-S0_20090213.23
4c66b348f796e91081b47c99229aa7f1 ow-S0_20090214.00
a05c8a0eb17fbedbbe469792a63add76 ow-S0_20090214.01
2169e4edc3cb9abbe74ceeb41fd55fb0 ow-S0_20090214.02
4a14a1f0cfcfc60436f6b04d827efd7f ow-S0_20090214.03
53754a2ad433f2b76f08181083c49d37 ow-S0_20090214.04
15a8b8acc50b5b4b8260f9da7b4b0478 ow-S0_20090214.05
d830a5548f8cf1065119fbb9e8e459ce ow-S0_20090214.06
4f0c0803079a5d935b9dc5258f826080 ow-S0_20090214.07
065a3df392e62741a38781180dac0756 ow-S0_20090214.08
732b72e04015745e8281c7f5a39b8712 ow-S0_20090214.09
4ec2c209d98e6f2463a001a7aacea9f0 ow-S0_20090214.10
499de0ddd6be8310ea352f0a30999c1f ow-S0_20090214.11
1ae6a78a1c479a5f1b48c15e459d8554 ow-S0_20090214.12
16f798118335e39d3b4083f638dab2dd ow-S0_20090214.13
360550a8fbbb8e7de3344a1a71e624d4 ow-S0_20090214.14
257e5f9d6de28b343dd93bbd19bda76e ow-S0_20090214.15
19b5cff38856af2f357fb038c9b4a94f ow-S0_20090214.16
cf3f35af3df1a55236511fcd1497d950 ow-S0_20090214.17
e250f725aba42ed6d8bddf11b9313f38 ow-S0_20090214.18
f2edfe3299936eb5ddd0b877c398e66f ow-S0_20090214.19
85efa6e66b830e60121ea9b5f46f681e ow-S0_20090214.20
e18f67e265bd949d25883f1cab56ea40 ow-S0_20090214.21
3b0c78beb091caec48009b83096401ba ow-S0_20090214.22
762d1f9abb58dcd87707cec16761439b ow-S0_20090214.23
| 48.074866 | 93 | 0.867779 |
ed4fc7c20d9fca2a2a3cde52337e211269494357 | 2,271 | pm | Perl | auto-lib/Paws/ElastiCache/NodeGroupMember.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | ["Apache-2.0"] | null | null | null | auto-lib/Paws/ElastiCache/NodeGroupMember.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | ["Apache-2.0"] | null | null | null | auto-lib/Paws/ElastiCache/NodeGroupMember.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | ["Apache-2.0"] | null | null | null |
# Generated by default/object.tt
package Paws::ElastiCache::NodeGroupMember;
use Moose;
has CacheClusterId => (is => 'ro', isa => 'Str');
has CacheNodeId => (is => 'ro', isa => 'Str');
has CurrentRole => (is => 'ro', isa => 'Str');
has PreferredAvailabilityZone => (is => 'ro', isa => 'Str');
has ReadEndpoint => (is => 'ro', isa => 'Paws::ElastiCache::Endpoint');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ElastiCache::NodeGroupMember
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::ElastiCache::NodeGroupMember object:
$service_obj->Method(Att1 => { CacheClusterId => $value, ..., ReadEndpoint => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::ElastiCache::NodeGroupMember object:
$result = $service_obj->Method(...);
$result->Att1->CacheClusterId
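A further illustrative sketch (hedged: the C<DescribeReplicationGroups> call
and the traversal below are assumptions about the surrounding ElastiCache
API, not part of this generated class):
  # Hypothetical: list the members of the first node group
  my $res = $service_obj->DescribeReplicationGroups;
  for my $member (@{ $res->ReplicationGroups->[0]->NodeGroups->[0]->NodeGroupMembers }) {
      printf "%s/%s (%s)\n", $member->CacheClusterId, $member->CacheNodeId,
          $member->CurrentRole // '-';
  }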
=head1 DESCRIPTION
Represents a single node within a node group (shard).
=head1 ATTRIBUTES
=head2 CacheClusterId => Str
The ID of the cluster to which the node belongs.
=head2 CacheNodeId => Str
The ID of the node within its cluster. A node ID is a numeric
identifier (0001, 0002, etc.).
=head2 CurrentRole => Str
The role that is currently assigned to the node - C<primary> or
C<replica>. This member is only applicable for Redis (cluster mode
disabled) replication groups.
=head2 PreferredAvailabilityZone => Str
The name of the Availability Zone in which the node is located.
=head2 ReadEndpoint => L<Paws::ElastiCache::Endpoint>
The information required for client programs to connect to a node for
read operations. The read endpoint is only applicable on Redis (cluster
mode disabled) clusters.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::ElastiCache>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 25.806818 | 105 | 0.730515 |
ed5a465cb95300fabf1941df0f0a29e4d300a552 | 586 | pl | Perl | demo/demo_bitwise.pl | gitpan/Lingua-Romana-Perligata | 686e3f3786ee81aac3ca9350a62d8f627d863688 | ["Artistic-1.0-Perl", "ClArtistic"] | 5 | 2019-10-25T17:20:45.000Z | 2022-02-02T13:56:28.000Z | demo/demo_bitwise.pl | gitpan/Lingua-Romana-Perligata | 686e3f3786ee81aac3ca9350a62d8f627d863688 | ["Artistic-1.0-Perl", "ClArtistic"] | null | null | null | demo/demo_bitwise.pl | gitpan/Lingua-Romana-Perligata | 686e3f3786ee81aac3ca9350a62d8f627d863688 | ["Artistic-1.0-Perl", "ClArtistic"] | null | null | null |
use Lingua::Romana::Perligata;
dum unum fac sic
dictum sic X: cis tum lacunam egresso scribe.
xo vestibulo perlegementum da.
xo xum tum nullum addementum da.
dictum sic Y: cis tum lacunam egresso scribe.
yo vestibulo perlegementum da.
yo yum tum nullum addementum da.
dictum sic and: cis tum lacunam tum cum xum tum yum consociamentum
tum novumversum scribe egresso.
dictum sic or: cis tum lacunam tum cum xum tum yum intersecamentum
tum novumversum scribe egresso.
dictum sic xor: cis tum lacunam tum cum xum tum yum discernementum
tum novumversum scribe egresso.
cis
| 34.470588 | 68 | 0.783276 |
ed44437d345b8294064f7b30aae643fb451c3cca | 3,106 | t | Perl | t/server/controller/download_url.t | twins2020/metacpan-api | ed27c320b743993214dc8d3e76adfbb9cd4c334b | ["Artistic-1.0"] | 113 | 2016-08-10T08:54:18.000Z | 2022-03-20T01:39:53.000Z | t/server/controller/download_url.t | twins2020/metacpan-api | ed27c320b743993214dc8d3e76adfbb9cd4c334b | ["Artistic-1.0"] | 347 | 2016-07-04T14:37:39.000Z | 2022-03-20T12:57:12.000Z | t/server/controller/download_url.t | twins2020/metacpan-api | ed27c320b743993214dc8d3e76adfbb9cd4c334b | ["Artistic-1.0"] | 110 | 2016-11-14T22:27:09.000Z | 2022-03-20T06:41:33.000Z |
use strict;
use warnings;
use lib 't/lib';
use Cpanel::JSON::XS ();
use HTTP::Request::Common qw( GET );
use MetaCPAN::Server ();
use MetaCPAN::TestHelpers qw( test_cache_headers );
use Plack::Test ();
use Test::More;
use Ref::Util qw( is_hashref );
my $app = MetaCPAN::Server->new->to_app();
my $test = Plack::Test->create($app);
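# Each test case below is a tuple:
#   [ title, request URL, expected status, expected version,
#     (optional) expected checksum_md5, (optional) expected checksum_sha256 ]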
my @tests = (
[ 'no parameters', '/download_url/Moose', 'latest', '0.02', ],
[
'version == (1)', '/download_url/Moose?version===0.01',
'cpan', '0.01'
],
[
'version == (2)', '/download_url/Moose?version===0.02',
'latest', '0.02'
],
[
'version != (1)', '/download_url/Moose?version=!=0.01',
'latest', '0.02'
],
[
'version != (2)', '/download_url/Moose?version=!=0.02',
'cpan', '0.01'
],
[
'version <= (1)', '/download_url/Moose?version=<=0.01',
'cpan', '0.01'
],
[
'version <= (2)', '/download_url/Moose?version=<=0.02',
'latest', '0.02'
],
[ 'version >=', '/download_url/Moose?version=>=0.01', 'latest', '0.02' ],
[
'range >, <',
'/download_url/Try::Tiny?version=>0.21,<0.27',
'cpan',
'0.24',
'1a12a51cfeb7e2c301e4ae093c7ecdfb',
'9b7a1af24c0256973d175369ebbdc25ec01e2452a97f2d3ab61481c826f38d81',
],
[
'range >, <, !',
'/download_url/Try::Tiny?version=>0.21,<0.27,!=0.24',
'cpan', '0.23'
],
[
'range >, <; dev',
'/download_url/Try::Tiny?version=>0.21,<0.27&dev=1',
'cpan', '0.26'
],
[
'range >, <, !; dev',
'/download_url/Try::Tiny?version=>0.21,<0.27,!=0.26&dev=1',
'cpan', '0.25'
],
);
for (@tests) {
my ( $title, $url, $status, $version, $checksum_md5, $checksum_sha256 )
= @$_;
subtest $title => sub {
my $res = $test->request( GET $url );
ok( $res, "GET $url" );
is( $res->code, 200, "code 200" );
test_cache_headers(
$res,
{
cache_control => 'private',
surrogate_key =>
'content_type=application/json content_type=application',
surrogate_control => undef,
},
);
is(
$res->header('content-type'),
'application/json; charset=utf-8',
'Content-type'
);
my $content = Cpanel::JSON::XS::decode_json $res->content;
ok( is_hashref($content), 'content is a JSON object' );
is( $content->{status}, $status, "correct status ($status)" );
is( $content->{version}, $version, "correct version ($version)" );
if ($checksum_md5) {
is( $content->{checksum_md5},
$checksum_md5, "correct checksum_md5 ($checksum_md5)" );
}
if ($checksum_sha256) {
is( $content->{checksum_sha256},
$checksum_sha256,
"correct checksum_sha256 ($checksum_sha256)" );
}
};
}
done_testing;
| 28.236364 | 77 | 0.490341 |
ed6feac0e466042781b0ca0f205c7cf2ec56f67f | 2,760 | t | Perl | t/samtools_index.t | Clinical-Genomics/MIP | db2e89fec2674f5c12dbf6ec89eba181433fc742 | ["MIT"] | 22 | 2017-09-04T07:50:54.000Z | 2022-01-01T20:41:45.000Z | t/samtools_index.t | Clinical-Genomics/MIP | db2e89fec2674f5c12dbf6ec89eba181433fc742 | ["MIT"] | 834 | 2017-09-05T07:18:38.000Z | 2022-03-31T15:27:49.000Z | t/samtools_index.t | Clinical-Genomics/MIP | db2e89fec2674f5c12dbf6ec89eba181433fc742 | ["MIT"] | 11 | 2017-09-12T10:53:30.000Z | 2021-11-30T01:40:49.000Z |
#!/usr/bin/env perl
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use File::Basename qw{ dirname };
use File::Spec::Functions qw{ catdir };
use FindBin qw{ $Bin };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use Test::More;
use utf8;
use warnings qw{ FATAL utf8 };
## CPANM
use autodie qw{ :all };
use Modern::Perl qw{ 2018 };
use Readonly;
## MIPs lib/
use lib catdir( dirname($Bin), q{lib} );
use MIP::Constants qw{ $COMMA $SPACE };
use MIP::Test::Commands qw{ test_function };
BEGIN {
use MIP::Test::Fixtures qw{ test_import };
### Check all internal dependency modules and imports
## Modules with import
my %perl_module = (
q{MIP::Program::Samtools} => [qw{ samtools_index }],
);
test_import( { perl_module_href => \%perl_module, } );
}
use MIP::Program::Samtools qw{samtools_index};
diag( q{Test samtools_index from Samtools.pm}
. $COMMA
. $SPACE . q{Perl}
. $SPACE
. $PERL_VERSION
. $SPACE
. $EXECUTABLE_NAME );
## Base arguments
my @function_base_commands = qw{ samtools };
my %base_argument = (
filehandle => {
input => undef,
expected_output => \@function_base_commands,
},
);
## Can be duplicated with %base and/or %specific to enable testing of each individual argument
my %required_argument = (
filehandle => {
input => undef,
expected_output => \@function_base_commands,
},
infile_path => {
input => q{infile.test},
expected_output => q{infile.test},
},
);
## Specific arguments
my %specific_argument = (
bai_format => {
input => 1,
expected_output => q{-b},
},
stderrfile_path => {
input => q{stderrfile.test},
expected_output => q{2> stderrfile.test},
},
stderrfile_path_append => {
input => q{stderrfile_path_append},
expected_output => q{2>> stderrfile_path_append},
},
stdoutfile_path => {
input => q{outfile_path},
expected_output => q{1> outfile_path},
},
);
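## Orientation note (a sketch, not something this test asserts verbatim):
## with the required infile and bai_format enabled, the rendered command
## line is expected to resemble `samtools index -b infile.test`.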
## Coderef - enables generalized use of generate call
my $module_function_cref = \&samtools_index;
## Test both base and function specific arguments
my @arguments = ( \%base_argument, \%specific_argument );
foreach my $argument_href (@arguments) {
my @commands = test_function(
{
argument_href => $argument_href,
function_base_commands_ref => \@function_base_commands,
module_function_cref => $module_function_cref,
required_argument_href => \%required_argument,
}
);
}
done_testing();
| 24.864865 | 94 | 0.608696 |
ed50659195b6e517c32ff444809f1059c3f38c74 | 2,619 | pl | Perl | src/bin/pg_rewind/t/003_extrafiles.pl | RhodiumToad/postgresql_projects | dcf5e319089e750d726a0e18a8c90b258c39362c | ["PostgreSQL"] | 2 | 2016-04-15T14:31:22.000Z | 2021-09-06T20:01:36.000Z | src/bin/pg_rewind/t/003_extrafiles.pl | egisatoshi/postgres | dcf5e319089e750d726a0e18a8c90b258c39362c | ["PostgreSQL"] | null | null | null | src/bin/pg_rewind/t/003_extrafiles.pl | egisatoshi/postgres | dcf5e319089e750d726a0e18a8c90b258c39362c | ["PostgreSQL"] | 1 | 2018-03-16T22:32:24.000Z | 2018-03-16T22:32:24.000Z |
# Test how pg_rewind reacts to extra files and directories in the data dirs.
use strict;
use warnings;
use TestLib;
use Test::More tests => 4;
use File::Find;
use RewindTest;
sub run_test
{
my $test_mode = shift;
RewindTest::init_rewind_test('extrafiles', $test_mode);
RewindTest::setup_cluster();
my $test_master_datadir = $RewindTest::test_master_datadir;
# Create a subdir and files that will be present in both
mkdir "$test_master_datadir/tst_both_dir";
append_to_file "$test_master_datadir/tst_both_dir/both_file1", "in both1";
append_to_file "$test_master_datadir/tst_both_dir/both_file2", "in both2";
mkdir "$test_master_datadir/tst_both_dir/both_subdir/";
append_to_file "$test_master_datadir/tst_both_dir/both_subdir/both_file3", "in both3";
RewindTest::create_standby();
# Create different subdirs and files in master and standby
mkdir "$test_standby_datadir/tst_standby_dir";
append_to_file "$test_standby_datadir/tst_standby_dir/standby_file1", "in standby1";
append_to_file "$test_standby_datadir/tst_standby_dir/standby_file2", "in standby2";
mkdir "$test_standby_datadir/tst_standby_dir/standby_subdir/";
append_to_file "$test_standby_datadir/tst_standby_dir/standby_subdir/standby_file3", "in standby3";
mkdir "$test_master_datadir/tst_master_dir";
append_to_file "$test_master_datadir/tst_master_dir/master_file1", "in master1";
append_to_file "$test_master_datadir/tst_master_dir/master_file2", "in master2";
mkdir "$test_master_datadir/tst_master_dir/master_subdir/";
append_to_file "$test_master_datadir/tst_master_dir/master_subdir/master_file3", "in master3";
RewindTest::promote_standby();
RewindTest::run_pg_rewind($test_mode);
# List files in the data directory after rewind.
my @paths;
find(sub {push @paths, $File::Find::name if $File::Find::name =~ m/.*tst_.*/},
$test_master_datadir);
@paths = sort @paths;
is_deeply(\@paths,
["$test_master_datadir/tst_both_dir",
"$test_master_datadir/tst_both_dir/both_file1",
"$test_master_datadir/tst_both_dir/both_file2",
"$test_master_datadir/tst_both_dir/both_subdir",
"$test_master_datadir/tst_both_dir/both_subdir/both_file3",
"$test_master_datadir/tst_standby_dir",
"$test_master_datadir/tst_standby_dir/standby_file1",
"$test_master_datadir/tst_standby_dir/standby_file2",
"$test_master_datadir/tst_standby_dir/standby_subdir",
"$test_master_datadir/tst_standby_dir/standby_subdir/standby_file3"],
"file lists match");
RewindTest::clean_rewind_test();
}
# Run the test in both modes.
run_test('local');
run_test('remote');
exit(0);
| 35.391892 | 100 | 0.777778 |
ed6c9feeba808dd013037109af715545eebcbc1b | 1,105 | pm | Perl | auto-lib/Paws/NimbleStudio/GetLaunchProfileDetailsResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/NimbleStudio/GetLaunchProfileDetailsResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/NimbleStudio/GetLaunchProfileDetailsResponse.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | ["Apache-2.0"] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::NimbleStudio::GetLaunchProfileDetailsResponse;
use Moose;
has LaunchProfile => (is => 'ro', isa => 'Paws::NimbleStudio::LaunchProfile', traits => ['NameInRequest'], request_name => 'launchProfile');
has StreamingImages => (is => 'ro', isa => 'ArrayRef[Paws::NimbleStudio::StreamingImage]', traits => ['NameInRequest'], request_name => 'streamingImages');
has StudioComponentSummaries => (is => 'ro', isa => 'ArrayRef[Paws::NimbleStudio::StudioComponentSummary]', traits => ['NameInRequest'], request_name => 'studioComponentSummaries');
has _request_id => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::NimbleStudio::GetLaunchProfileDetailsResponse
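A hedged usage sketch (assumes C<$nimble> is a constructed Paws NimbleStudio
service object; the argument names and IDs below are assumptions based on
the Nimble Studio API, not taken from this file):
  my $res = $nimble->GetLaunchProfileDetails(
      LaunchProfileId => 'lp-example',       # hypothetical IDs
      StudioId        => 'studio-example',
  );
  print $res->LaunchProfile->Name, "\n" if $res->LaunchProfile;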
=head1 ATTRIBUTES
=head2 LaunchProfile => L<Paws::NimbleStudio::LaunchProfile>
The launch profile.
=head2 StreamingImages => ArrayRef[L<Paws::NimbleStudio::StreamingImage>]
A collection of streaming images.
=head2 StudioComponentSummaries => ArrayRef[L<Paws::NimbleStudio::StudioComponentSummary>]
A collection of studio component summaries.
=head2 _request_id => Str
=cut
| 27.625 | 183 | 0.732127 |
ed14fb03c0acb4368e6e1893f5a9e925d165c34a | 4,445 | pl | Perl | misc_utilities/SAM_to_gtf.pl | nextgenusfs/PASApipeline | 3eb15edfc971becd49332f1e31ca16ec3152ef1e | ["BSD-3-Clause"] | null | null | null | misc_utilities/SAM_to_gtf.pl | nextgenusfs/PASApipeline | 3eb15edfc971becd49332f1e31ca16ec3152ef1e | ["BSD-3-Clause"] | null | null | null | misc_utilities/SAM_to_gtf.pl | nextgenusfs/PASApipeline | 3eb15edfc971becd49332f1e31ca16ec3152ef1e | ["BSD-3-Clause"] | null | null | null |
#!/usr/bin/env perl
use strict;
use warnings;
use FindBin;
use lib ("$FindBin::RealBin/../PerlLib");
use SAM_reader;
use SAM_entry;
my $usage = "usage: $0 file.sam [debug_flag=0]\n\n";
my $sam_file = $ARGV[0] or die $usage;
my $DEBUG = $ARGV[1];
main: {
my %PATH_COUNTER;
my $sam_reader = new SAM_reader($sam_file);
while ($sam_reader->has_next()) {
my $sam_entry = $sam_reader->get_next();
if ($sam_entry->is_query_unmapped()) {
next;
}
my $sequence = $sam_entry->get_sequence();
if ($sequence eq "*") {
next;
}
my $sam_line = $sam_entry->get_original_line();
if ($DEBUG) {
print "$sam_line\n";
}
my $num_mismatches = 0;
if ($sam_line =~ /NM:i:(\d+)/) {
$num_mismatches = $1;
}
else {
die "Error, couldn't extract num mismatches from sam line: $sam_line";
}
my $read_name = $sam_entry->get_read_name();
my $scaff_name = $sam_entry->get_scaffold_name();
my $strand = $sam_entry->get_query_strand();
my ($genome_coords_aref, $query_coords_aref) = $sam_entry->get_alignment_coords();
my $align_len = 0;
{
foreach my $coordset (@$genome_coords_aref) {
my $seglen = abs($coordset->[1] - $coordset->[0]) + 1;
$align_len += $seglen;
if ($DEBUG) {
print STDERR join("\t", $coordset->[0], $coordset->[1], "seglen: $seglen") . "\n";
}
}
}
if ($DEBUG) {
print STDERR "num_mismatches: $num_mismatches, align_length: $align_len\n";
}
#my $per_id = sprintf("%.1f", 100 - $num_mismatches/$align_len * 100);
my $align_counter = "$read_name.p" . ++$PATH_COUNTER{$read_name};
my @genome_n_trans_coords;
while (@$genome_coords_aref) {
my $genome_coordset_aref = shift @$genome_coords_aref;
my $trans_coordset_aref = shift @$query_coords_aref;
my ($genome_lend, $genome_rend) = @$genome_coordset_aref;
my ($trans_lend, $trans_rend) = sort {$a<=>$b} @$trans_coordset_aref;
push (@genome_n_trans_coords, [ $genome_lend, $genome_rend, $trans_lend, $trans_rend ] );
}
#use Data::Dumper;
#print Dumper(\@genome_n_trans_coords);
## merge neighboring features if within a short distance unlikely to represent an intron.
my @merged_coords;
push (@merged_coords, shift @genome_n_trans_coords);
my $MERGE_DIST = 10;
while (@genome_n_trans_coords) {
my $coordset_ref = shift @genome_n_trans_coords;
my $last_coordset_ref = $merged_coords[$#merged_coords];
if ($coordset_ref->[0] - $last_coordset_ref->[1] <= $MERGE_DIST) {
# merge it.
$last_coordset_ref->[1] = $coordset_ref->[1];
if ($strand eq "+") {
$last_coordset_ref->[3] = $coordset_ref->[3];
} else {
$last_coordset_ref->[2] = $coordset_ref->[2];
}
}
else {
# not merging.
push (@merged_coords, $coordset_ref);
}
}
my $trans_align_len = 0;
foreach my $coordset_ref (@merged_coords) {
my ($genome_lend, $genome_rend, $trans_lend, $trans_rend) = @$coordset_ref;
$trans_align_len += $trans_rend - $trans_lend + 1;
}
if ($DEBUG) {
print "interval-based alignment length: $trans_align_len\n";
}
my $per_id = sprintf("%.2f", 100 - ($num_mismatches / $trans_align_len * 100));
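# Emit one line per merged alignment segment. Note: despite the script
# name, the ID=...;Target=... attributes below follow the GFF3
# cDNA_match convention rather than GTF.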
foreach my $coordset_ref (@merged_coords) {
my ($genome_lend, $genome_rend, $trans_lend, $trans_rend) = @$coordset_ref;
print join("\t",
$scaff_name,
"genome",
"cDNA_match",
$genome_lend, $genome_rend,
$per_id,
$strand,
".",
"ID=$align_counter;Target=$read_name $trans_lend $trans_rend") . "\n";
}
print "\n";
}
exit(0);
}
| 28.132911 | 102 | 0.506412 |
ed26447586a608d8b797de66bf5780628517ba5b | 458 | al | Perl | KOST-Val/src/main/resources_notJar/resources/ExifTool-10.15/Perl/site/lib/auto/Tk/Scale/Leave.al | rebplu/KOST-VAL | 1537125425068d5faec3bc4f5263df715956ae76 | ["BSD-3-Clause-No-Nuclear-Warranty"] | null | null | null | KOST-Val/src/main/resources_notJar/resources/ExifTool-10.15/Perl/site/lib/auto/Tk/Scale/Leave.al | rebplu/KOST-VAL | 1537125425068d5faec3bc4f5263df715956ae76 | ["BSD-3-Clause-No-Nuclear-Warranty"] | null | null | null | KOST-Val/src/main/resources_notJar/resources/ExifTool-10.15/Perl/site/lib/auto/Tk/Scale/Leave.al | rebplu/KOST-VAL | 1537125425068d5faec3bc4f5263df715956ae76 | ["BSD-3-Clause-No-Nuclear-Warranty"] | null | null | null |
# NOTE: Derived from ..\blib\lib\Tk\Scale.pm.
# Changes made here will be lost when autosplit is run again.
# See AutoSplit.pm.
package Tk::Scale;
#line 111 "..\blib\lib\Tk\Scale.pm (autosplit into ..\blib\lib\auto\Tk\Scale\Leave.al)"
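# Pointer-leave handler: restore the saved active background under
# strict Motif, and return the widget from 'active' to 'normal' state.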
sub Leave
{
my ($w) = @_;
$w->configure('-activebackground',$w->{'activeBg'}) if ($Tk::strictMotif);
$w->configure('-state','normal') if ($w->cget('-state') eq 'active');
}
# end of Tk::Scale::Leave
1;
| 28.625 | 88 | 0.631004 |
ed1be213897350152f63ca9052e439a3343d83ab | 143,848 | pm | Perl | Source/Manip/Date.pm | ssp/Pester | f2d8ec2f62bfb83656f77f3ee41b54149287904a | ["BSD-2-Clause"] | 59 | 2015-01-11T18:44:25.000Z | 2022-03-07T22:56:02.000Z | Source/Manip/Date.pm | ssp/Pester | f2d8ec2f62bfb83656f77f3ee41b54149287904a | ["BSD-2-Clause"] | 11 | 2015-06-19T11:01:00.000Z | 2018-06-05T21:30:17.000Z | Source/Manip/Date.pm | ssp/Pester | f2d8ec2f62bfb83656f77f3ee41b54149287904a | ["BSD-2-Clause"] | 7 | 2015-09-21T21:04:59.000Z | 2022-02-13T18:26:47.000Z |
package Date::Manip::Date;
# Copyright (c) 1995-2014 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
########################################################################
# Any routine that starts with an underscore (_) is NOT intended for
# public use. They are for internal use in the Date::Manip
# modules and are subject to change without warning or notice.
#
# ABSOLUTELY NO USER SUPPORT IS OFFERED FOR THESE ROUTINES!
########################################################################
use Date::Manip::Obj;
@ISA = ('Date::Manip::Obj');
require 5.010000;
use warnings;
use strict;
use integer;
use utf8;
use IO::File;
use Storable qw(dclone);
#use re 'debug';
use Date::Manip::Base;
use Date::Manip::TZ;
our $VERSION;
$VERSION='6.48';
END { undef $VERSION; }
########################################################################
# BASE METHODS
########################################################################
# Call this every time a new date is put in to make sure everything is
# correctly initialized.
#
sub _init {
my($self) = @_;
$$self{'err'} = '';
$$self{'data'} =
{
'set' => 0, # 1 if the date has been set
# 2 if the date is in the process of being set
# The date as input
'in' => '', # the string that was parsed (if any)
'zin' => '', # the timezone that was parsed (if any)
# The date in the parsed timezone
'date' => [], # the parsed date split
'def' => [0,0,0,0,0,0],
# 1 for each field that came from
# defaults rather than parsed
# '' for an implied field
'tz' => '', # the timezone of the date
'isdst' => '', # 1 if the date is in DST.
'offset' => [], # The offset from GMT
'abb' => '', # The timezone abbreviation.
'f' => {}, # fields used in printing a date
# The date in GMT
'gmt' => [], # the date converted to GMT
# The date in local timezone
'loc' => [], # the date converted to local timezone
};
}
sub _init_args {
my($self) = @_;
my @args = @{ $$self{'args'} };
if (@args) {
if ($#args == 0) {
$self->parse($args[0]);
} else {
warn "WARNING: [new] invalid arguments: @args\n";
}
}
}
sub input {
my($self) = @_;
return $$self{'data'}{'in'};
}
########################################################################
# DATE PARSING
########################################################################
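# Public entry points in this section: parse(), parse_time(), parse_date()
# and parse_format(). A minimal usage sketch (see the Date::Manip::Date
# documentation for the full interface):
#
#    my $date = new Date::Manip::Date;
#    my $err  = $date->parse('Jan 3 2009 midnight');
#    print $date->value(), "\n"  unless ($err);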
sub parse {
my($self,$instring,@opts) = @_;
$self->_init();
my $noupdate = 0;
if (! $instring) {
$$self{'err'} = '[parse] Empty date string';
return 1;
}
my %opts = map { $_,1 } @opts;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($done,$y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off,$dow,$got_time,
$default_time,$firsterr);
ENCODING:
foreach my $string ($dmb->_encoding($instring)) {
$got_time = 0;
$default_time = 0;
# Put parse in a simple loop for an easy exit.
PARSE:
{
my(@tmp,$tmp);
$$self{'err'} = '';
# Check the standard date format
$tmp = $dmb->split('date',$string);
if (defined($tmp)) {
($y,$m,$d,$h,$mn,$s) = @$tmp;
$got_time = 1;
last PARSE;
}
# Parse ISO 8601 dates now (which may have a timezone).
if (! exists $opts{'noiso8601'}) {
($done,@tmp) = $self->_parse_datetime_iso8601($string,\$noupdate);
if ($done) {
($y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off) = @tmp;
$got_time = 1;
last PARSE;
}
}
# There are lots of ways that commas may be included. Remove
# them (unless a comma is preceded and followed by a digit, in
# which case it's probably a fractional separator).
$string =~ s/(?<!\d),/ /g;
$string =~ s/,(?!\d)/ /g;
# Some special full date/time formats ('now', 'epoch')
if (! exists $opts{'nospecial'}) {
($done,@tmp) = $self->_parse_datetime_other($string,\$noupdate);
if ($done) {
($y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off) = @tmp;
$got_time = 1;
last PARSE;
}
}
# Parse (and remove) the time (and an immediately following timezone).
($got_time,@tmp) = $self->_parse_time('parse',$string,\$noupdate,%opts);
if ($got_time) {
($string,$h,$mn,$s,$tzstring,$zone,$abb,$off) = @tmp;
}
if (! $string) {
($y,$m,$d) = $self->_def_date($y,$m,$d,\$noupdate);
last;
}
# Parse (and remove) the day of week. Also, handle the simple DoW
# formats.
if (! exists $opts{'nodow'}) {
($done,@tmp) = $self->_parse_dow($string,\$noupdate);
if (@tmp) {
if ($done) {
($y,$m,$d) = @tmp;
$default_time = 1;
last PARSE;
} else {
($string,$dow) = @tmp;
}
}
}
$dow = 0 if (! $dow);
# At this point, the string might contain the following dates:
#
# OTHER
# OTHER ZONE / ZONE OTHER
# DELTA
# DELTA ZONE / ZONE DELTA
# HOLIDAY
# HOLIDAY ZONE / ZONE HOLIDAY
#
# ZONE is only allowed if it wasn't parsed with the time
# Unfortunately, there are some conflicts between zones and
# some other formats, so try parsing the entire string as a date.
(@tmp) = $self->_parse_date($string,$dow,\$noupdate,%opts);
if (@tmp) {
($y,$m,$d,$dow) = @tmp;
$default_time = 1;
last PARSE;
}
# Parse any timezone
if (! $tzstring) {
($string,@tmp) = $self->_parse_tz($string,\$noupdate);
($tzstring,$zone,$abb,$off) = @tmp if (@tmp);
last PARSE if (! $string);
}
# Try the remainder of the string as a date.
if ($tzstring) {
(@tmp) = $self->_parse_date($string,$dow,\$noupdate,%opts);
if (@tmp) {
($y,$m,$d,$dow) = @tmp;
$default_time = 1;
last PARSE;
}
}
# Parse deltas
#
# Occasionally, a delta is entered for a date (which is
# interpreted as the date relative to now). There can be some
# confusion between a date and a delta, but the most
# important conflicts are the ISO 8601 dates (many of which
# could be interpreted as a delta), but those have already
# been taken care of.
#
# We may have already gotten the time:
# 3 days ago at midnight UTC
# (we already stripped off the 'at midnight UTC' above).
#
# We also need to handle the situation of a delta and a timezone.
# in 2 hours EST
# in 2 days EST
# but only if no time was entered.
if (! exists $opts{'nodelta'}) {
($done,@tmp) =
$self->_parse_delta($string,$dow,$got_time,$h,$mn,$s,\$noupdate);
if (@tmp) {
($y,$m,$d,$h,$mn,$s) = @tmp;
$got_time = 1;
$dow = '';
}
last PARSE if ($done);
}
# Parse holidays
unless (exists $opts{'noholidays'}) {
($done,@tmp) =
$self->_parse_holidays($string,\$noupdate);
if (@tmp) {
($y,$m,$d) = @tmp;
}
last PARSE if ($done);
}
$$self{'err'} = '[parse] Invalid date string';
last PARSE;
}
# We got an error parsing this encoding of the string. It could
# be that it is a genuine error, or it may be that we simply
# need to try a different encoding. If ALL encodings fail, we'll
# return the error from the first one.
if ($$self{'err'}) {
if (! $firsterr) {
$firsterr = $$self{'err'};
}
next ENCODING;
}
# If we didn't get an error, this is the string to use.
last ENCODING;
}
if ($$self{'err'}) {
$$self{'err'} = $firsterr;
return 1;
}
# Make sure that a time is set
if (! $got_time) {
if ($default_time) {
if ($dmb->_config('defaulttime') eq 'midnight') {
($h,$mn,$s) = (0,0,0);
} else {
($h,$mn,$s) = $dmt->_now('time',$noupdate);
$noupdate = 1;
}
$got_time = 1;
} else {
($h,$mn,$s) = $self->_def_time(undef,undef,undef,\$noupdate);
}
}
$$self{'data'}{'set'} = 2;
return $self->_parse_check('parse',$instring,
$y,$m,$d,$h,$mn,$s,$dow,$tzstring,$zone,$abb,$off);
}
sub parse_time {
my($self,$string,@opts) = @_;
my %opts = map { $_,1 } @opts;
my $noupdate = 0;
if (! $string) {
$$self{'err'} = '[parse_time] Empty time string';
return 1;
}
my($y,$m,$d,$h,$mn,$s);
if ($$self{'err'}) {
$self->_init();
}
if ($$self{'data'}{'set'}) {
($y,$m,$d,$h,$mn,$s) = @{ $$self{'data'}{'date'} };
} else {
my $dmt = $$self{'tz'};
($y,$m,$d,$h,$mn,$s) = $dmt->_now('now',$noupdate);
$noupdate = 1;
}
my($tzstring,$zone,$abb,$off);
($h,$mn,$s,$tzstring,$zone,$abb,$off) =
$self->_parse_time('parse_time',$string,\$noupdate,%opts);
return 1 if ($$self{'err'});
$$self{'data'}{'set'} = 2;
return $self->_parse_check('parse_time','',
$y,$m,$d,$h,$mn,$s,'',$tzstring,$zone,$abb,$off);
}
sub parse_date {
my($self,$string,@opts) = @_;
my %opts = map { $_,1 } @opts;
my $noupdate = 0;
if (! $string) {
$$self{'err'} = '[parse_date] Empty date string';
return 1;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d,$h,$mn,$s);
if ($$self{'err'}) {
$self->_init();
}
if ($$self{'data'}{'set'}) {
($y,$m,$d,$h,$mn,$s) = @{ $$self{'data'}{'date'} };
} else {
($h,$mn,$s) = (0,0,0);
}
# Put parse in a simple loop for an easy exit.
my($done,@tmp,$dow);
PARSE:
{
# Parse ISO 8601 dates now
unless (exists $opts{'noiso8601'}) {
($done,@tmp) = $self->_parse_date_iso8601($string,\$noupdate);
if ($done) {
($y,$m,$d) = @tmp;
last PARSE;
}
}
(@tmp) = $self->_parse_date($string,undef,\$noupdate,%opts);
if (@tmp) {
($y,$m,$d,$dow) = @tmp;
last PARSE;
}
$$self{'err'} = '[parse_date] Invalid date string';
return 1;
}
return 1 if ($$self{'err'});
$y = $dmt->_fix_year($y);
$$self{'data'}{'set'} = 2;
return $self->_parse_check('parse_date','',$y,$m,$d,$h,$mn,$s,$dow);
}
sub _parse_date {
my($self,$string,$dow,$noupdate,%opts) = @_;
# There are lots of ways that commas may be included. Remove
# them.
#
# Also remove some words we should ignore.
$string =~ s/,/ /g;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $ign = (exists $$dmb{'data'}{'rx'}{'other'}{'ignore'} ?
$$dmb{'data'}{'rx'}{'other'}{'ignore'} :
$self->_other_rx('ignore'));
$string =~ s/$ign/ /g;
my $of = $+{'of'};
$string =~ s/\s*$//;
return () if (! $string);
my($done,$y,$m,$d,@tmp);
# Put parse in a simple loop for an easy exit.
PARSE:
{
# Parse (and remove) the day of week. Also, handle the simple DoW
# formats.
unless (exists $opts{'nodow'}) {
if (! defined($dow)) {
($done,@tmp) = $self->_parse_dow($string,$noupdate);
if (@tmp) {
if ($done) {
($y,$m,$d) = @tmp;
last PARSE;
} else {
($string,$dow) = @tmp;
}
}
$dow = 0 if (! $dow);
}
}
# Parse common dates
unless (exists $opts{'nocommon'}) {
(@tmp) = $self->_parse_date_common($string,$noupdate);
if (@tmp) {
($y,$m,$d) = @tmp;
last PARSE;
}
}
# Parse less common dates
unless (exists $opts{'noother'}) {
(@tmp) = $self->_parse_date_other($string,$dow,$of,$noupdate);
if (@tmp) {
($y,$m,$d,$dow) = @tmp;
last PARSE;
}
}
return ();
}
return($y,$m,$d,$dow);
}
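# parse_format() usage sketch (format string first, then the date string;
# the %-directives mirror those accepted by printf() in this class):
#
#    my $err = $date->parse_format('%Y-%m-%d %H:%M:%S','2014-01-15 12:30:00');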
sub parse_format {
my($self,$format,$string) = @_;
$self->_init();
my $noupdate = 0;
if (! $string) {
$$self{'err'} = '[parse_format] Empty date string';
return 1;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($err,$re) = $self->_format_regexp($format);
return $err if ($err);
return 1 if ($string !~ $re);
my($y,$m,$d,$h,$mn,$s,
$mon_name,$mon_abb,$dow_name,$dow_abb,$dow_char,$dow_num,
$doy,$nth,$ampm,$epochs,$epocho,
$tzstring,$off,$abb,$zone,
$g,$w,$l,$u) =
@+{qw(y m d h mn s
mon_name mon_abb dow_name dow_abb dow_char dow_num doy
nth ampm epochs epocho tzstring off abb zone g w l u)};
while (1) {
# Get y/m/d/h/mn/s from:
# $epochs,$epocho
if (defined($epochs)) {
($y,$m,$d,$h,$mn,$s) = @{ $dmb->secs_since_1970($epochs) };
my $z;
if ($zone) {
$z = $dmt->_zone($zone);
return 'Invalid zone' if (! $z);
} elsif ($abb || $off) {
$z = $dmt->zone($off,$abb);
return 'Invalid zone' if (! $z);
} else {
$z = $dmt->_now('tz',$noupdate);
$noupdate = 1;
}
my($err,$date) = $dmt->convert_from_gmt([$y,$m,$d,$h,$mn,$s],$z);
($y,$m,$d,$h,$mn,$s) = @$date;
last;
}
if (defined($epocho)) {
($y,$m,$d,$h,$mn,$s) = @{ $dmb->secs_since_1970($epocho) };
last;
}
# Get y/m/d from:
# $y,$m,$d,
# $mon_name,$mon_abb
# $doy,$nth
# $g/$w,$l/$u
if ($mon_name) {
$m = $$dmb{'data'}{'wordmatch'}{'month_name'}{lc($mon_name)};
} elsif ($mon_abb) {
$m = $$dmb{'data'}{'wordmatch'}{'month_abb'}{lc($mon_abb)};
}
if ($nth) {
$d = $$dmb{'data'}{'wordmatch'}{'nth'}{lc($nth)};
}
if ($doy) {
$y = $dmt->_now('y',$noupdate) if (! $y);
$noupdate = 1;
($y,$m,$d) = @{ $dmb->day_of_year($y,$doy) };
} elsif ($g) {
$y = $dmt->_now('y',$noupdate) if (! $y);
$noupdate = 1;
($y,$m,$d) = @{ $dmb->_week_of_year($g,$w,1) };
} elsif ($l) {
$y = $dmt->_now('y',$noupdate) if (! $y);
$noupdate = 1;
($y,$m,$d) = @{ $dmb->_week_of_year($l,$u,7) };
} elsif ($m) {
($y,$m,$d) = $self->_def_date($y,$m,$d,\$noupdate);
}
# Get h/mn/s from:
# $h,$mn,$s,$ampm
if (defined($h)) {
($h,$mn,$s) = $self->_def_time($h,$mn,$s,\$noupdate);
}
if ($ampm) {
if ($$dmb{'data'}{'wordmatch'}{'ampm'}{lc($ampm)} == 2) {
# pm times
$h+=12 unless ($h==12);
} else {
# am times
$h=0 if ($h==12);
}
}
# Get dow from:
# $dow_name,$dow_abb,$dow_char,$dow_num
if ($dow_name) {
$dow_num = $$dmb{'data'}{'wordmatch'}{'day_name'}{lc($dow_name)};
} elsif ($dow_abb) {
$dow_num = $$dmb{'data'}{'wordmatch'}{'day_abb'}{lc($dow_abb)};
} elsif ($dow_char) {
$dow_num = $$dmb{'data'}{'wordmatch'}{'day_char'}{lc($dow_char)};
}
last;
}
if (! $m) {
($y,$m,$d) = $dmt->_now('now',$noupdate);
$noupdate = 1;
}
if (! defined($h)) {
($h,$mn,$s) = (0,0,0);
}
$$self{'data'}{'set'} = 2;
$err = $self->_parse_check('parse_format',$string,
$y,$m,$d,$h,$mn,$s,$dow_num,
$tzstring,$zone,$abb,$off);
if (wantarray) {
my %tmp = %{ dclone(\%+) };
return ($err,%tmp);
}
return $err;
}
BEGIN {
my %y_form = map { $_,1 } qw( Y y s o G L );
my %m_form = map { $_,1 } qw( m f b h B j s o W U );
my %d_form = map { $_,1 } qw( j d e E s o W U );
my %h_form = map { $_,1 } qw( H I k i s o );
my %mn_form = map { $_,1 } qw( M s o );
my %s_form = map { $_,1 } qw( S s o );
my %dow_form = map { $_,1 } qw( v a A w );
my %am_form = map { $_,1 } qw( p s o );
my %z_form = map { $_,1 } qw( Z z N );
my %mon_form = map { $_,1 } qw( b h B );
my %day_form = map { $_,1 } qw( v a A );
sub _format_regexp {
my($self,$format) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
if (exists $$dmb{'data'}{'format'}{$format}) {
return @{ $$dmb{'data'}{'format'}{$format} };
}
my $re;
my $err;
my($y,$m,$d,$h,$mn,$s) = (0,0,0,0,0,0);
my($dow,$ampm,$zone,$G,$W,$L,$U) = (0,0,0,0,0,0,0);
while ($format) {
last if ($format eq '%');
if ($format =~ s/^([^%]+)//) {
$re .= $1;
next;
}
$format =~ s/^%(.)//;
my $f = $1;
if (exists $y_form{$f}) {
if ($y) {
$err = 'Year specified multiple times';
last;
}
$y = 1;
}
if (exists $m_form{$f}) {
if ($m) {
$err = 'Month specified multiple times';
last;
}
$m = 1;
}
if (exists $d_form{$f}) {
if ($d) {
$err = 'Day specified multiple times';
last;
}
$d = 1;
}
if (exists $h_form{$f}) {
if ($h) {
$err = 'Hour specified multiple times';
last;
}
$h = 1;
}
if (exists $mn_form{$f}) {
if ($mn) {
$err = 'Minutes specified multiple times';
last;
}
$mn = 1;
}
if (exists $s_form{$f}) {
if ($s) {
$err = 'Seconds specified multiple times';
last;
}
$s = 1;
}
if (exists $dow_form{$f}) {
if ($dow) {
$err = 'Day-of-week specified multiple times';
last;
}
$dow = 1;
}
if (exists $am_form{$f}) {
if ($ampm) {
$err = 'AM/PM specified multiple times';
last;
}
$ampm = 1;
}
if (exists $z_form{$f}) {
if ($zone) {
$err = 'Zone specified multiple times';
last;
}
$zone = 1;
}
if ($f eq 'G') {
if ($G) {
$err = 'G specified multiple times';
last;
}
$G = 1;
} elsif ($f eq 'W') {
if ($W) {
$err = 'W specified multiple times';
last;
}
$W = 1;
} elsif ($f eq 'L') {
if ($L) {
$err = 'L specified multiple times';
last;
}
$L = 1;
} elsif ($f eq 'U') {
if ($U) {
$err = 'U specified multiple times';
last;
}
$U = 1;
}
###
if ($f eq 'Y') {
$re .= '(?<y>\d\d\d\d)';
} elsif ($f eq 'y') {
$re .= '(?<y>\d\d)';
} elsif ($f eq 'm') {
$re .= '(?<m>\d\d)';
} elsif ($f eq 'f') {
$re .= '(?:(?<m>\d\d)| ?(?<m>\d))';
} elsif (exists $mon_form{$f}) {
my $abb = $$dmb{'data'}{'rx'}{'month_abb'}[0];
my $nam = $$dmb{'data'}{'rx'}{'month_name'}[0];
$re .= "(?:(?<mon_name>$nam)|(?<mon_abb>$abb))";
} elsif ($f eq 'j') {
$re .= '(?<doy>\d\d\d)';
} elsif ($f eq 'd') {
$re .= '(?<d>\d\d)';
} elsif ($f eq 'e') {
$re .= '(?:(?<d>\d\d)| ?(?<d>\d))';
} elsif (exists $day_form{$f}) {
my $abb = $$dmb{'data'}{'rx'}{'day_abb'}[0];
my $name = $$dmb{'data'}{'rx'}{'day_name'}[0];
my $char = $$dmb{'data'}{'rx'}{'day_char'}[0];
$re .= "(?:(?<dow_name>$name)|(?<dow_abb>$abb)|(?<dow_char>$char))";
} elsif ($f eq 'w') {
$re .= '(?<dow_num>[1-7])';
} elsif ($f eq 'E') {
my $nth = $$dmb{'data'}{'rx'}{'nth'}[0];
$re .= "(?<nth>$nth)"
} elsif ($f eq 'H' || $f eq 'I') {
$re .= '(?<h>\d\d)';
} elsif ($f eq 'k' || $f eq 'i') {
$re .= '(?:(?<h>\d\d)| ?(?<h>\d))';
} elsif ($f eq 'p') {
my $ampm = $$dmb{data}{rx}{ampm}[0];
$re .= "(?<ampm>$ampm)";
} elsif ($f eq 'M') {
$re .= '(?<mn>\d\d)';
} elsif ($f eq 'S') {
$re .= '(?<s>\d\d)';
} elsif (exists $z_form{$f}) {
$re .= $dmt->_zrx('zrx');
} elsif ($f eq 's') {
$re .= '(?<epochs>\d+)';
} elsif ($f eq 'o') {
$re .= '(?<epocho>\d+)';
} elsif ($f eq 'G') {
$re .= '(?<g>\d\d\d\d)';
} elsif ($f eq 'W') {
$re .= '(?<w>\d\d)';
} elsif ($f eq 'L') {
$re .= '(?<l>\d\d\d\d)';
} elsif ($f eq 'U') {
$re .= '(?<u>\d\d)';
} elsif ($f eq 'c') {
$format = '%a %b %e %H:%M:%S %Y' . $format;
} elsif ($f eq 'C' || $f eq 'u') {
$format = '%a %b %e %H:%M:%S %Z %Y' . $format;
} elsif ($f eq 'g') {
$format = '%a, %d %b %Y %H:%M:%S %Z' . $format;
} elsif ($f eq 'D') {
$format = '%m/%d/%y' . $format;
} elsif ($f eq 'r') {
$format = '%I:%M:%S %p' . $format;
} elsif ($f eq 'R') {
$format = '%H:%M' . $format;
} elsif ($f eq 'T' || $f eq 'X') {
$format = '%H:%M:%S' . $format;
} elsif ($f eq 'V') {
$format = '%m%d%H%M%y' . $format;
} elsif ($f eq 'Q') {
$format = '%Y%m%d' . $format;
} elsif ($f eq 'q') {
$format = '%Y%m%d%H%M%S' . $format;
} elsif ($f eq 'P') {
$format = '%Y%m%d%H:%M:%S' . $format;
} elsif ($f eq 'O') {
$format = '%Y\\-%m\\-%dT%H:%M:%S' . $format;
} elsif ($f eq 'F') {
$format = '%A, %B %e, %Y' . $format;
} elsif ($f eq 'K') {
$format = '%Y-%j' . $format;
} elsif ($f eq 'J') {
$format = '%G-W%W-%w' . $format;
} elsif ($f eq 'x') {
if ($dmb->_config('dateformat') eq 'US') {
$format = '%m/%d/%y' . $format;
} else {
$format = '%d/%m/%y' . $format;
}
} elsif ($f eq 't') {
$re .= "\t";
} elsif ($f eq '%') {
$re .= '%';
} elsif ($f eq '+') {
$re .= '\\+';
}
}
if ($m != $d) {
$err = 'Date not fully specified';
} elsif ( ($h || $mn || $s) && (! $h || ! $mn) ) {
$err = 'Time not fully specified';
} elsif ($ampm && ! $h) {
$err = 'Time not fully specified';
} elsif ($G != $W) {
$err = 'G/W must both be specified';
} elsif ($L != $U) {
$err = 'L/U must both be specified';
}
if ($err) {
$$dmb{'data'}{'format'}{$format} = [$err];
return ($err);
}
$$dmb{'data'}{'format'}{$format} = [0, qr/$re/i];
return @{ $$dmb{'data'}{'format'}{$format} };
}
}
########################################################################
# DATE FORMATS
########################################################################
sub _parse_check {
my($self,$caller,$instring,
$y,$m,$d,$h,$mn,$s,$dow,$tzstring,$zone,$abb,$off) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Check day_of_week for validity BEFORE converting 24:00:00 to the
# next day
if ($dow) {
my $tmp = $dmb->day_of_week([$y,$m,$d]);
if ($tmp != $dow) {
$$self{'err'} = "[$caller] Day of week invalid";
return 1;
}
}
# Handle 24:00:00 times.
if ($h == 24) {
($h,$mn,$s) = (0,0,0);
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],1) };
}
if (! $dmb->check([$y,$m,$d,$h,$mn,$s])) {
$$self{'err'} = "[$caller] Invalid date";
return 1;
}
# Interpret timezone information and check that date is valid
# in the timezone.
my ($zonename,$isdst);
if (defined($zone)) {
$zonename = $dmt->_zone($zone);
if (! $zonename) {
$$self{'err'} = "[$caller] Unable to determine timezone: $zone";
return 1;
}
} elsif (defined($abb) || defined($off)) {
my (@tmp,$err);
push(@tmp,[$y,$m,$d,$h,$mn,$s]);
push(@tmp,$off) if (defined $off);
push(@tmp,$abb) if (defined $abb);
$zonename = $dmt->zone(@tmp);
if (! $zonename) {
$$self{'err'} = 'Unable to determine timezone';
return 1;
}
# Figure out $isdst from $abb/$off (for everything else, we'll
# try both values).
if (defined $off || defined $abb) {
my @off = @{ $dmb->split('offset',$off) } if (defined($off));
my $err = 1;
foreach my $i (0,1) {
my $per = $dmt->date_period([$y,$m,$d,$h,$mn,$s],$zonename,1,$i);
next if (! $per);
my $a = $$per[4];
my $o = $$per[3];
if (defined $abb && lc($a) eq lc($abb)) {
$err = 0;
$isdst = $i;
$abb = $a;
last;
}
if (defined ($off)) {
if ($off[0] == $$o[0] &&
$off[1] == $$o[1] &&
$off[2] == $$o[2]) {
$err = 0;
$isdst = $i;
last;
}
}
}
if ($err) {
$$self{'err'} = 'Invalid timezone';
return 1;
}
}
} else {
$zonename = $dmt->_now('tz');
}
# Store the date
$self->set('zdate',$zonename,[$y,$m,$d,$h,$mn,$s],$isdst);
return 1 if ($$self{'err'});
$$self{'data'}{'in'} = $instring;
$$self{'data'}{'zin'} = $zone if (defined($zone));
return 0;
}
# Set up the regular expressions for ISO 8601 parsing. Returns the
# requested regexp. $rx can be:
# cdate : regular expression for a complete date
# tdate : regular expression for a truncated date
# ctime : regular expression for a complete time
# ttime : regular expression for a truncated time
# date : regular expression for a date only
# time : regular expression for a time only
# UNDEF : regular expression for a valid date and/or time
#
# Date matches are:
# y m d doy w dow yod c
# Time matches are:
# h h24 mn s fh fm
#
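# Internal usage sketch (named captures are listed above):
#    my $rx = $self->_iso8601_rx('fulldate');
#    if ($string =~ $rx) { my($y,$m,$d) = @+{qw(y m d)}; }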
sub _iso8601_rx {
my($self,$rx) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
return $$dmb{'data'}{'rx'}{'iso'}{$rx}
if (exists $$dmb{'data'}{'rx'}{'iso'}{$rx});
if ($rx eq 'cdate' || $rx eq 'tdate') {
my $y4 = '(?<y>\d\d\d\d)';
my $y2 = '(?<y>\d\d)';
my $m = '(?<m>0[1-9]|1[0-2])';
my $d = '(?<d>0[1-9]|[12][0-9]|3[01])';
my $doy = '(?<doy>00[1-9]|0[1-9][0-9]|[1-2][0-9][0-9]|3[0-5][0-9]|36[0-6])';
my $w = '(?<w>0[1-9]|[1-4][0-9]|5[0-3])';
my $dow = '(?<dow>[1-7])';
my $yod = '(?<yod>\d)';
my $cc = '(?<c>\d\d)';
my $cdaterx =
"${y4}${m}${d}|" . # CCYYMMDD
"${y4}\\-${m}\\-${d}|" . # CCYY-MM-DD
"\\-${y2}${m}${d}|" . # -YYMMDD
"\\-${y2}\\-${m}\\-${d}|" . # -YY-MM-DD
"\\-?${y2}${m}${d}|" . # YYMMDD
"\\-?${y2}\\-${m}\\-${d}|" . # YY-MM-DD
"\\-\\-${m}\\-?${d}|" . # --MM-DD --MMDD
"\\-\\-\\-${d}|" . # ---DD
"${y4}\\-?${doy}|" . # CCYY-DoY CCYYDoY
"\\-?${y2}\\-?${doy}|" . # YY-DoY -YY-DoY
# YYDoY -YYDoY
"\\-${doy}|" . # -DoY
"${y4}W${w}${dow}|" . # CCYYWwwD
"${y4}\\-W${w}\\-${dow}|" . # CCYY-Www-D
"\\-?${y2}W${w}${dow}|" . # YYWwwD -YYWwwD
"\\-?${y2}\\-W${w}\\-${dow}|" . # YY-Www-D -YY-Www-D
"\\-?${yod}W${w}${dow}|" . # YWwwD -YWwwD
"\\-?${yod}\\-W${w}\\-${dow}|" . # Y-Www-D -Y-Www-D
"\\-W${w}\\-?${dow}|" . # -Www-D -WwwD
"\\-W\\-${dow}|" . # -W-D
"\\-\\-\\-${dow}"; # ---D
$cdaterx = qr/(?:$cdaterx)/i;
my $tdaterx =
"${y4}\\-${m}|" . # CCYY-MM
"${y4}|" . # CCYY
"\\-${y2}\\-?${m}|" . # -YY-MM -YYMM
"\\-${y2}|" . # -YY
"\\-\\-${m}|" . # --MM
"${y4}\\-?W${w}|" . # CCYYWww CCYY-Www
"\\-?${y2}\\-?W${w}|" . # YY-Www YYWww
# -YY-Www -YYWww
"\\-?W${w}|" . # -Www Www
"${cc}"; # CC
$tdaterx = qr/(?:$tdaterx)/i;
$$dmb{'data'}{'rx'}{'iso'}{'cdate'} = $cdaterx;
$$dmb{'data'}{'rx'}{'iso'}{'tdate'} = $tdaterx;
} elsif ($rx eq 'ctime' || $rx eq 'ttime') {
my $hh = '(?<h>[0-1][0-9]|2[0-3])';
my $mn = '(?<mn>[0-5][0-9])';
my $ss = '(?<s>[0-5][0-9])';
my $h24a = '(?<h24>24(?::00){0,2})';
my $h24b = '(?<h24>24(?:00){0,2})';
my $h = '(?<h>[0-9])';
my $fh = '(?:[\.,](?<fh>\d*))'; # fractional hours (keep)
my $fm = '(?:[\.,](?<fm>\d*))'; # fractional minutes (keep)
my $fs = '(?:[\.,]\d*)'; # fractional seconds (discard)
my $zrx = $dmt->_zrx('zrx');
my $ctimerx =
"${hh}${mn}${ss}${fs}?|" . # HHMNSS[,S+]
"${hh}:${mn}:${ss}${fs}?|" . # HH:MN:SS[,S+]
"${hh}:?${mn}${fm}|" . # HH:MN,M+ HHMN,M+
"${hh}${fh}|" . # HH,H+
"\\-${mn}:?${ss}${fs}?|" . # -MN:SS[,S+] -MNSS[,S+]
"\\-${mn}${fm}|" . # -MN,M+
"\\-\\-${ss}${fs}?|" . # --SS[,S+]
"${hh}:?${mn}|" . # HH:MN HHMN
"${h24a}|" . # 24:00:00 24:00 24
"${h24b}|" . # 240000 2400
"${h}:${mn}:${ss}${fs}?|" . # H:MN:SS[,S+]
"${h}:${mn}${fm}"; # H:MN,M+
$ctimerx = qr/(?:$ctimerx)(?:\s*$zrx)?/;
my $ttimerx =
"${hh}|" . # HH
"\\-${mn}"; # -MN
$ttimerx = qr/(?:$ttimerx)/;
$$dmb{'data'}{'rx'}{'iso'}{'ctime'} = $ctimerx;
$$dmb{'data'}{'rx'}{'iso'}{'ttime'} = $ttimerx;
} elsif ($rx eq 'date') {
my $cdaterx = $self->_iso8601_rx('cdate');
my $tdaterx = $self->_iso8601_rx('tdate');
$$dmb{'data'}{'rx'}{'iso'}{'date'} = qr/(?:$cdaterx|$tdaterx)/;
} elsif ($rx eq 'time') {
my $ctimerx = $self->_iso8601_rx('ctime');
my $ttimerx = $self->_iso8601_rx('ttime');
$$dmb{'data'}{'rx'}{'iso'}{'time'} = qr/(?:$ctimerx|$ttimerx)/;
} elsif ($rx eq 'fulldate') {
# A parseable string contains:
# a complete date and complete time
# a complete date and truncated time
# a truncated date
# a complete time
# a truncated time
# If the string contains both a time and date, they may be adjacent
# or separated by:
# whitespace
# T (which must be followed by a number)
# a dash
my $cdaterx = $self->_iso8601_rx('cdate');
my $tdaterx = $self->_iso8601_rx('tdate');
my $ctimerx = $self->_iso8601_rx('ctime');
my $ttimerx = $self->_iso8601_rx('ttime');
my $sep = qr/(?:T|\-|\s*)/i;
my $daterx = qr/^\s*(?: $cdaterx(?:$sep(?:$ctimerx|$ttimerx))? |
$tdaterx |
$ctimerx |
$ttimerx
)\s*$/x;
$$dmb{'data'}{'rx'}{'iso'}{'fulldate'} = $daterx;
}
return $$dmb{'data'}{'rx'}{'iso'}{$rx};
}
sub _parse_datetime_iso8601 {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $daterx = $self->_iso8601_rx('fulldate');
my($y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off);
my($doy,$dow,$yod,$c,$w,$fh,$fm,$h24);
if ($string =~ $daterx) {
($y,$m,$d,$h,$mn,$s,$doy,$dow,$yod,$c,$w,$fh,$fm,$h24,
$tzstring,$zone,$abb,$off) =
@+{qw(y m d h mn s doy dow yod c w fh fm h24 tzstring zone abb off)};
if (defined $w || defined $dow) {
($y,$m,$d) = $self->_def_date_dow($y,$w,$dow,$noupdate);
} elsif (defined $doy) {
($y,$m,$d) = $self->_def_date_doy($y,$doy,$noupdate);
} else {
$y = $c . '00' if (defined $c);
($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
}
($h,$mn,$s) = $self->_time($h,$mn,$s,$fh,$fm,$h24,undef,$noupdate);
} else {
return (0);
}
return (1,$y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off);
}
sub _parse_date_iso8601 {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $daterx = $self->_iso8601_rx('date');
my($y,$m,$d);
my($doy,$dow,$yod,$c,$w);
if ($string =~ /^$daterx$/) {
($y,$m,$d,$doy,$dow,$yod,$c,$w) =
@+{qw(y m d doy dow yod c w)};
if (defined $w || defined $dow) {
($y,$m,$d) = $self->_def_date_dow($y,$w,$dow,$noupdate);
} elsif (defined $doy) {
($y,$m,$d) = $self->_def_date_doy($y,$doy,$noupdate);
} else {
$y = $c . '00' if (defined $c);
($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
}
} else {
return (0);
}
return (1,$y,$m,$d);
}
# Handle all of the time fields.
#
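# Illustrative behavior (a sketch of this internal helper; the
# argument order is ($self,$h,$mn,$s,$fh,$fm,$h24,$ampm,$noupdate),
# and 'PM'/'AM' are assumed to be the configured am/pm words):
#   h=12, mn=30, ampm='PM' => (12,30,0)  # 12:30 PM
#   h=12, mn=30, ampm='AM' => ( 0,30,0)  # 12:30 AM
#   h=2,  fh='5'           => ( 2,30,0)  # 2.5 hours = 02:30:00
#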
no integer;
sub _time {
my($self,$h,$mn,$s,$fh,$fm,$h24,$ampm,$noupdate) = @_;
if (defined($ampm) && $ampm) {
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
if ($$dmb{'data'}{'wordmatch'}{'ampm'}{lc($ampm)} == 2) {
# pm times
$h+=12 unless ($h==12);
} else {
# am times
$h=0 if ($h==12);
}
}
if (defined $h24) {
return(24,0,0);
} elsif (defined $fh && $fh ne "") {
$fh = "0.$fh";
$s = int($fh * 3600);
$mn = int($s/60);
$s -= $mn*60;
} elsif (defined $fm && $fm ne "") {
$fm = "0.$fm";
$s = int($fm*60);
}
($h,$mn,$s) = $self->_def_time($h,$mn,$s,$noupdate);
return($h,$mn,$s);
}
use integer;
# Set up the regular expressions for other date and time formats. Returns the
# requested regexp.
#
sub _other_rx {
my($self,$rx) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
$rx = '_' if (! defined $rx);
if ($rx eq 'time') {
my $h24 = '(?<h>2[0-3]|1[0-9]|0?[0-9])'; # 0-23 00-23
my $h12 = '(?<h>1[0-2]|0?[1-9])'; # 1-12 01-12
my $mn = '(?<mn>[0-5][0-9])'; # 00-59
my $ss = '(?<s>[0-5][0-9])'; # 00-59
# how to express fractions
my($f1,$f2,$sepfr);
if (exists $$dmb{'data'}{'rx'}{'sepfr'} &&
$$dmb{'data'}{'rx'}{'sepfr'}) {
$sepfr = $$dmb{'data'}{'rx'}{'sepfr'};
} else {
$sepfr = '';
}
if ($sepfr) {
$f1 = "(?:[.,]|$sepfr)";
$f2 = "(?:[.,:]|$sepfr)";
} else {
$f1 = "[.,]";
$f2 = "[.,:]";
}
my $fh = "(?:$f1(?<fh>\\d*))"; # fractional hours (keep)
my $fm = "(?:$f1(?<fm>\\d*))"; # fractional minutes (keep)
my $fs = "(?:$f2\\d*)"; # fractional seconds
# AM/PM
my($ampm);
if (exists $$dmb{'data'}{'rx'}{'ampm'}) {
$ampm = "(?:\\s*(?<ampm>$$dmb{data}{rx}{ampm}[0]))";
}
# H:MN and MN:S separators
my @hm = ("\Q:\E");
my @ms = ("\Q:\E");
if ($dmb->_config('periodtimesep')) {
push(@hm,"\Q.\E");
push(@ms,"\Q.\E");
}
if (exists $$dmb{'data'}{'rx'}{'sephm'} &&
defined $$dmb{'data'}{'rx'}{'sephm'} &&
exists $$dmb{'data'}{'rx'}{'sepms'} &&
defined $$dmb{'data'}{'rx'}{'sepms'}) {
push(@hm,@{ $$dmb{'data'}{'rx'}{'sephm'} });
push(@ms,@{ $$dmb{'data'}{'rx'}{'sepms'} });
}
# How to express the time
# matches = (H, FH, MN, FM, S, AMPM, TZSTRING, ZONE, ABB, OFF)
my $timerx;
for (my $i=0; $i<=$#hm; $i++) {
my $hm = $hm[$i];
my $ms = $ms[$i];
$timerx .= "${h12}$hm${mn}$ms${ss}${fs}?${ampm}?|" # H12:MN:SS[,S+] [AM]
if ($ampm);
$timerx .= "${h24}$hm${mn}$ms${ss}${fs}?|" . # H24:MN:SS[,S+]
"(?<h>24)$hm(?<mn>00)$ms(?<s>00)|"; # 24:00:00
}
for (my $i=0; $i<=$#hm; $i++) {
my $hm = $hm[$i];
my $ms = $ms[$i];
$timerx .= "${h12}$hm${mn}${fm}${ampm}?|" # H12:MN,M+ [AM]
if ($ampm);
$timerx .= "${h24}$hm${mn}${fm}|"; # H24:MN,M+
}
for (my $i=0; $i<=$#hm; $i++) {
my $hm = $hm[$i];
my $ms = $ms[$i];
$timerx .= "${h12}$hm${mn}${ampm}?|" # H12:MN [AM]
if ($ampm);
$timerx .= "${h24}$hm${mn}|" . # H24:MN
"(?<h>24)$hm(?<mn>00)|"; # 24:00
}
$timerx .= "${h12}${fh}${ampm}|" # H12,H+ AM
if ($ampm);
$timerx .= "${h12}${ampm}|" if ($ampm); # H12 AM
$timerx .= "${h24}${fh}|"; # H24,H+
chop($timerx); # remove trailing pipe
my $zrx = $dmt->_zrx('zrx');
my $at = $$dmb{'data'}{'rx'}{'at'};
my $atrx = qr/(?:^|\s+)(?:$at)\s+/;
$timerx = qr/(?:$atrx|^|\s+)(?:$timerx)(?:\s*$zrx)?(?:\s+|$)/i;
$$dmb{'data'}{'rx'}{'other'}{$rx} = $timerx;
} elsif ($rx eq 'common_1') {
# These are of the format M/D/Y
# Do NOT replace <m> and <d> with a regular expression to
# match 1-12 since the DateFormat config may reverse the two.
my $y4 = '(?<y>\d\d\d\d)';
my $y2 = '(?<y>\d\d)';
my $m = '(?<m>\d\d?)';
my $d = '(?<d>\d\d?)';
my $sep = '(?<sep>[\s\.\/\-])';
my $daterx =
"${m}${sep}${d}\\k<sep>$y4|" . # M/D/YYYY
"${m}${sep}${d}\\k<sep>$y2|" . # M/D/YY
"${m}${sep}${d}"; # M/D
$daterx = qr/^\s*(?:$daterx)\s*$/;
$$dmb{'data'}{'rx'}{'other'}{$rx} = $daterx;
} elsif ($rx eq 'common_2') {
my $abb = $$dmb{'data'}{'rx'}{'month_abb'}[0];
my $nam = $$dmb{'data'}{'rx'}{'month_name'}[0];
my $y4 = '(?<y>\d\d\d\d)';
my $y2 = '(?<y>\d\d)';
my $m = '(?<m>\d\d?)';
my $d = '(?<d>\d\d?)';
my $dd = '(?<d>\d\d)';
my $mmm = "(?:(?<mmm>$abb)|(?<month>$nam))";
my $sep = '(?<sep>[\s\.\/\-])';
my $daterx =
"${y4}${sep}${m}\\k<sep>$d|" . # YYYY/M/D
"${mmm}\\s*${dd}\\s*${y4}|" . # mmmDDYYYY
"${mmm}\\s*${dd}\\s*${y2}|" . # mmmDDYY
"${mmm}\\s*${d}|" . # mmmD
"${d}\\s*${mmm}\\s*${y4}|" . # DmmmYYYY
"${d}\\s*${mmm}\\s*${y2}|" . # DmmmYY
"${d}\\s*${mmm}|" . # Dmmm
"${y4}\\s*${mmm}\\s*${d}|" . # YYYYmmmD
"${mmm}${sep}${d}\\k<sep>${y4}|" . # mmm/D/YYYY
"${mmm}${sep}${d}\\k<sep>${y2}|" . # mmm/D/YY
"${mmm}${sep}${d}|" . # mmm/D
"${d}${sep}${mmm}\\k<sep>${y4}|" . # D/mmm/YYYY
"${d}${sep}${mmm}\\k<sep>${y2}|" . # D/mmm/YY
"${d}${sep}${mmm}|" . # D/mmm
"${y4}${sep}${mmm}\\k<sep>${d}|" . # YYYY/mmm/D
"${mmm}${sep}?${d}\\s+${y2}|" . # mmmD YY mmm/D YY
"${mmm}${sep}?${d}\\s+${y4}|" . # mmmD YYYY mmm/D YYYY
"${d}${sep}?${mmm}\\s+${y2}|" . # Dmmm YY D/mmm YY
"${d}${sep}?${mmm}\\s+${y4}|" . # Dmmm YYYY D/mmm YYYY
"${y2}\\s+${mmm}${sep}?${d}|" . # YY mmmD YY mmm/D
"${y4}\\s+${mmm}${sep}?${d}|" . # YYYY mmmD YYYY mmm/D
"${y2}\\s+${d}${sep}?${mmm}|" . # YY Dmmm YY D/mmm
"${y4}\\s+${d}${sep}?${mmm}|" . # YYYY Dmmm YYYY D/mmm
"${y4}:${m}:${d}"; # YYYY:MM:DD
$daterx = qr/^\s*(?:$daterx)\s*$/i;
$$dmb{'data'}{'rx'}{'other'}{$rx} = $daterx;
} elsif ($rx eq 'dow') {
my $day_abb = $$dmb{'data'}{'rx'}{'day_abb'}[0];
my $day_name = $$dmb{'data'}{'rx'}{'day_name'}[0];
my $on = $$dmb{'data'}{'rx'}{'on'};
my $onrx = qr/(?:^|\s+)(?:$on)\s+/;
my $dowrx = qr/(?:$onrx|^|\s+)(?<dow>$day_name|$day_abb)($|\s+)/i;
$$dmb{'data'}{'rx'}{'other'}{$rx} = $dowrx;
} elsif ($rx eq 'ignore') {
my $of = $$dmb{'data'}{'rx'}{'of'};
my $ignrx = qr/(?:^|\s+)(?<of>$of)(\s+|$)/;
$$dmb{'data'}{'rx'}{'other'}{$rx} = $ignrx;
} elsif ($rx eq 'miscdatetime') {
my $special = $$dmb{'data'}{'rx'}{'offset_time'}[0];
$special = "(?<special>$special)";
my $secs = "(?<epoch>[-+]?\\d+)";
my $abb = $$dmb{'data'}{'rx'}{'month_abb'}[0];
my $mmm = "(?<mmm>$abb)";
my $y4 = '(?<y>\d\d\d\d)';
my $dd = '(?<d>\d\d)';
my $h24 = '(?<h>2[0-3]|[01][0-9])'; # 00-23
my $mn = '(?<mn>[0-5][0-9])'; # 00-59
my $ss = '(?<s>[0-5][0-9])'; # 00-59
my $offrx = $dmt->_zrx('offrx');
my $zrx = $dmt->_zrx('zrx');
my $daterx =
"${special}|" . # now
"${special}\\s+${zrx}|" . # now EDT
"epoch\\s+$secs|" . # epoch SECS
"epoch\\s+$secs\\s+${zrx}|" . # epoch SECS EDT
"${dd}\\/${mmm}\\/${y4}:${h24}:${mn}:${ss}\\s*${offrx}";
# Common log format: 10/Oct/2000:13:55:36 -0700
$daterx = qr/^\s*(?:$daterx)\s*$/i;
$$dmb{'data'}{'rx'}{'other'}{$rx} = $daterx;
} elsif ($rx eq 'misc') {
my $abb = $$dmb{'data'}{'rx'}{'month_abb'}[0];
my $nam = $$dmb{'data'}{'rx'}{'month_name'}[0];
my $next = $$dmb{'data'}{'rx'}{'nextprev'}[0];
my $last = $$dmb{'data'}{'rx'}{'last'};
my $yf = $$dmb{data}{rx}{fields}[1];
my $mf = $$dmb{data}{rx}{fields}[2];
my $wf = $$dmb{data}{rx}{fields}[3];
my $df = $$dmb{data}{rx}{fields}[4];
my $nth = $$dmb{'data'}{'rx'}{'nth'}[0];
my $nth_wom = $$dmb{'data'}{'rx'}{'nth_wom'}[0];
my $special = $$dmb{'data'}{'rx'}{'offset_date'}[0];
my $y = '(?:(?<y>\d\d\d\d)|(?<y>\d\d))';
my $mmm = "(?:(?<mmm>$abb)|(?<month>$nam))";
$next = "(?<next>$next)";
$last = "(?<last>$last)";
$yf = "(?<field_y>$yf)";
$mf = "(?<field_m>$mf)";
$wf = "(?<field_w>$wf)";
$df = "(?<field_d>$df)";
my $fld = "(?:$yf|$mf|$wf)";
$nth = "(?<nth>$nth)";
$nth_wom = "(?<nth>$nth_wom)";
$special = "(?<special>$special)";
my $daterx =
"${mmm}\\s+${nth}\\s*$y?|" . # Dec 1st [1970]
"${nth}\\s+${mmm}\\s*$y?|" . # 1st Dec [1970]
"$y\\s+${mmm}\\s+${nth}|" . # 1970 Dec 1st
"$y\\s+${nth}\\s+${mmm}|" . # 1970 1st Dec
"${next}\\s+${fld}|" . # next year, next month, next week
"${next}|" . # next friday
"${last}\\s+${mmm}\\s*$y?|" . # last friday in october 95
"${last}\\s+${df}\\s+${mmm}\\s*$y?|" .
# last day in october 95
"${last}\\s*$y?|" . # last friday in 95
"${nth_wom}\\s+${mmm}\\s*$y?|" .
# nth DoW in MMM [YYYY]
"${nth}\\s*$y?|" . # nth DoW in [YYYY]
"${nth}\\s+$df\\s+${mmm}\\s*$y?|" .
# nth day in MMM [YYYY]
"${nth}\\s+${wf}\\s*$y?|" . # DoW Nth week [YYYY]
"${wf}\\s+(?<n>\\d+)\\s*$y?|" . # DoW week N [YYYY]
"${special}|" . # today, tomorrow
"${special}\\s+${wf}|" . # today week
# British: same as 1 week from today
"${nth}|" . # nth
"${wf}"; # monday week
# British: same as 'in 1 week on monday'
$daterx = qr/^\s*(?:$daterx)\s*$/i;
$$dmb{'data'}{'rx'}{'other'}{$rx} = $daterx;
}
return $$dmb{'data'}{'rx'}{'other'}{$rx};
}
sub _parse_time {
my($self,$caller,$string,$noupdate,%opts) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($timerx,$h,$mn,$s,$fh,$fm,$h24,$ampm,$tzstring,$zone,$abb,$off);
my $got_time = 0;
# Check for ISO 8601 time
#
# This is only called via parse_time (parse_date uses a regexp
# that matches a full ISO 8601 date/time instead of parsing them
# separately). Since some ISO 8601 times are a substring of non-ISO
# 8601 times (i.e. 12:30 is a substring of '12:30 PM'), we need to
# match entire strings here.
if ($caller eq 'parse_time') {
$timerx = (exists $$dmb{'data'}{'rx'}{'iso'}{'time'} ?
$$dmb{'data'}{'rx'}{'iso'}{'time'} :
$self->_iso8601_rx('time'));
if (! exists $opts{'noiso8601'}) {
if ($string =~ s/^\s*$timerx\s*$//) {
($h,$fh,$mn,$fm,$s,$ampm,$tzstring,$zone,$abb,$off) =
@+{qw(h fh mn fm s ampm tzstring zone abb off)};
($h,$mn,$s) = $self->_def_time($h,$mn,$s,$noupdate);
$h24 = 1 if ($h == 24 && $mn == 0 && $s == 0);
$string =~ s/\s*$//;
$got_time = 1;
}
}
}
# Make time substitutions (e.g. noon => 12:00:00)
if (! $got_time &&
! exists $opts{'noother'}) {
my @rx = @{ $$dmb{'data'}{'rx'}{'times'} };
shift(@rx);
foreach my $rx (@rx) {
if ($string =~ $rx) {
my $repl = $$dmb{'data'}{'wordmatch'}{'times'}{lc($1)};
$string =~ s/$rx/$repl/g;
}
}
}
# Check to see if there is a time in the string
if (! $got_time) {
$timerx = (exists $$dmb{'data'}{'rx'}{'other'}{'time'} ?
$$dmb{'data'}{'rx'}{'other'}{'time'} :
$self->_other_rx('time'));
if ($string =~ s/$timerx/ /) {
($h,$fh,$mn,$fm,$s,$ampm,$tzstring,$zone,$abb,$off) =
@+{qw(h fh mn fm s ampm tzstring zone abb off)};
($h,$mn,$s) = $self->_def_time($h,$mn,$s,$noupdate);
$h24 = 1 if ($h == 24 && $mn == 0 && $s == 0);
$string =~ s/\s*$//;
$got_time = 1;
}
}
# If we called this from $date->parse()
# returns the string and a list of time components
if ($caller eq 'parse') {
if ($got_time) {
($h,$mn,$s) = $self->_time($h,$mn,$s,$fh,$fm,$h24,$ampm,$noupdate);
return ($got_time,$string,$h,$mn,$s,$tzstring,$zone,$abb,$off);
} else {
return (0);
}
}
# If we called this from $date->parse_time()
if (! $got_time || $string) {
$$self{'err'} = "[$caller] Invalid time string";
return ();
}
($h,$mn,$s) = $self->_time($h,$mn,$s,$fh,$fm,$h24,$ampm,$noupdate);
return ($h,$mn,$s,$tzstring,$zone,$abb,$off);
}
# Parse common dates
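#
# Illustrative inputs (a sketch; the M/D order in common_1 follows
# the DateFormat config variable):
#   common_1 : 05/06/2009 => 2009-05-06 (US) or 2009-06-05 (non-US)
#   common_2 : Jun 5 2009, 5 Jun 09, 2009/06/05, 2009:06:05
#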
sub _parse_date_common {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Since we want whitespace to be used as a separator, turn all
# whitespace into single spaces. This is necessary since the
# regexps do backreferences to make sure that separators are
# not mixed.
$string =~ s/\s+/ /g;
my $daterx = (exists $$dmb{'data'}{'rx'}{'other'}{'common_1'} ?
$$dmb{'data'}{'rx'}{'other'}{'common_1'} :
$self->_other_rx('common_1'));
if ($string =~ $daterx) {
my($y,$m,$d) = @+{qw(y m d)};
if ($dmb->_config('dateformat') ne 'US') {
($m,$d) = ($d,$m);
}
($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
return($y,$m,$d);
}
$daterx = (exists $$dmb{'data'}{'rx'}{'other'}{'common_2'} ?
$$dmb{'data'}{'rx'}{'other'}{'common_2'} :
$self->_other_rx('common_2'));
if ($string =~ $daterx) {
my($y,$m,$d,$mmm,$month) = @+{qw(y m d mmm month)};
if ($mmm) {
$m = $$dmb{'data'}{'wordmatch'}{'month_abb'}{lc($mmm)};
} elsif ($month) {
$m = $$dmb{'data'}{'wordmatch'}{'month_name'}{lc($month)};
}
($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
return($y,$m,$d);
}
return ();
}
sub _parse_tz {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my($tzstring,$zone,$abb,$off);
my $rx = $dmt->_zrx('zrx');
if ($string =~ s/(?:^|\s)$rx(?:$|\s)/ /) {
($tzstring,$zone,$abb,$off) = @+{qw(tzstring zone abb off)};
return($string,$tzstring,$zone,$abb,$off);
}
return($string);
}
sub _parse_dow {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d,$dow);
# Remove the day of week
my $rx = (exists $$dmb{'data'}{'rx'}{'other'}{'dow'} ?
$$dmb{'data'}{'rx'}{'other'}{'dow'} :
$self->_other_rx('dow'));
if ($string =~ s/$rx/ /) {
$dow = $+{'dow'};
$dow = lc($dow);
$dow = $$dmb{'data'}{'wordmatch'}{'day_abb'}{$dow}
if (exists $$dmb{'data'}{'wordmatch'}{'day_abb'}{$dow});
$dow = $$dmb{'data'}{'wordmatch'}{'day_name'}{$dow}
if (exists $$dmb{'data'}{'wordmatch'}{'day_name'}{$dow});
} else {
return (0);
}
$string =~ s/\s*$//;
$string =~ s/^\s*//;
return (0,$string,$dow) if ($string);
# Handle the simple DoW format
($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
my($w,$dow1);
($y,$w) = $dmb->week_of_year([$y,$m,$d]); # week of year
($y,$m,$d) = @{ $dmb->week_of_year($y,$w) }; # first day
$dow1 = $dmb->day_of_week([$y,$m,$d]); # DoW of first day
$dow1 -= 7 if ($dow1 > $dow);
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],$dow-$dow1) };
return(1,$y,$m,$d);
}
sub _parse_holidays {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d);
if (! exists $$dmb{'data'}{'rx'}{'holidays'}) {
return (0);
}
$string =~ s/\s*$//;
$string =~ s/^\s*//;
my $rx = $$dmb{'data'}{'rx'}{'holidays'};
if ($string =~ $rx) {
my $hol;
($y,$hol) = @+{qw(y holiday)};
$y = $dmt->_now('y',$noupdate) if (! $y);
$y += 0;
$self->_holidays($y,2);
return (0) if (! exists $$dmb{'data'}{'holidays'}{'dates'}{$y});
foreach my $m (keys %{ $$dmb{'data'}{'holidays'}{'dates'}{$y} }) {
foreach my $d (keys %{ $$dmb{'data'}{'holidays'}{'dates'}{$y}{$m} }) {
foreach my $nam (@{ $$dmb{'data'}{'holidays'}{'dates'}{$y}{$m}{$d} }) {
if (lc($nam) eq lc($hol)) {
return(1,$y,$m,$d);
}
}
}
}
}
return (0);
}
sub _parse_delta {
my($self,$string,$dow,$got_time,$h,$mn,$s,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d);
my $delta = $self->new_delta();
my $err = $delta->parse($string);
my $tz = $dmt->_now('tz');
my $isdst = $dmt->_now('isdst');
if (! $err) {
my($dy,$dm,$dw,$dd,$dh,$dmn,$ds) = @{ $$delta{'data'}{'delta'} };
if ($got_time &&
($dh != 0 || $dmn != 0 || $ds != 0)) {
$$self{'err'} = '[parse] Two times entered or implied';
return (1);
}
if ($got_time) {
($y,$m,$d) = $self->_def_date($y,$m,$d,$noupdate);
} else {
($y,$m,$d,$h,$mn,$s) = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
}
my $business = $$delta{'data'}{'business'};
my($date2,$offset,$abbrev);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([$y,$m,$d,$h,$mn,$s],
[$dy,$dm,$dw,$dd,$dh,$dmn,$ds],
0,$business,$tz,$isdst);
($y,$m,$d,$h,$mn,$s) = @$date2;
if ($dow) {
if ($dd != 0 || $dh != 0 || $dmn != 0 || $ds != 0) {
$$self{'err'} = '[parse] Day of week not allowed';
return (1);
}
my($w,$dow1);
($y,$w) = $dmb->week_of_year([$y,$m,$d]); # week of year
($y,$m,$d) = @{ $dmb->week_of_year($y,$w) }; # first day
$dow1 = $dmb->day_of_week([$y,$m,$d]); # DoW of first day
$dow1 -= 7 if ($dow1 > $dow);
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],$dow-$dow1) };
}
return (1,$y,$m,$d,$h,$mn,$s);
}
return (0);
}
sub _parse_datetime_other {
my($self,$string,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $rx = (exists $$dmb{'data'}{'rx'}{'other'}{'miscdatetime'} ?
$$dmb{'data'}{'rx'}{'other'}{'miscdatetime'} :
$self->_other_rx('miscdatetime'));
if ($string =~ $rx) {
my ($special,$epoch,$y,$mmm,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off) =
@+{qw(special epoch y mmm d h mn s tzstring zone abb off)};
if (defined($special)) {
my $delta = $$dmb{'data'}{'wordmatch'}{'offset_time'}{lc($special)};
my @delta = @{ $dmb->split('delta',$delta) };
my @date = $dmt->_now('now',$$noupdate);
my $tz = $dmt->_now('tz');
my $isdst = $dmt->_now('isdst');
$$noupdate = 1;
my($err,$date2,$offset,$abbrev);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([@date],[@delta],0,0,$tz,$isdst);
if ($tzstring) {
my(@args);
push(@args,$zone) if ($zone);
push(@args,$abb) if ($abb);
push(@args,$off) if ($off);
push(@args,$date2);
$zone = $dmt->zone(@args);
return (0) if (! $zone);
my(@tmp) = $dmt->_convert('_parse_datetime_other',$date2,$tz,$zone);
$date2 = $tmp[1];
}
@date = @$date2;
return (1,@date,$tzstring,$zone,$abb,$off);
} elsif (defined($epoch)) {
my $date = [1970,1,1,0,0,0];
my @delta = (0,0,$epoch);
$date = $dmb->calc_date_time($date,\@delta);
my($err);
if ($tzstring) {
my(@args);
push(@args,$zone) if ($zone);
push(@args,$abb) if ($abb);
push(@args,$off) if ($off);
push(@args,$date);
$zone = $dmt->zone(@args);
return (0) if (! $zone);
($err,$date) = $dmt->convert_from_gmt($date,$zone);
} else {
($err,$date) = $dmt->convert_from_gmt($date);
}
return (1,@$date,$tzstring,$zone,$abb,$off);
} elsif (defined($y)) {
my $m = $$dmb{'data'}{'wordmatch'}{'month_abb'}{lc($mmm)};
return (1,$y,$m,$d,$h,$mn,$s,$tzstring,$zone,$abb,$off);
}
}
return (0);
}
sub _parse_date_other {
my($self,$string,$dow,$of,$noupdate) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d,$h,$mn,$s);
my $rx = (exists $$dmb{'data'}{'rx'}{'other'}{'misc'} ?
$$dmb{'data'}{'rx'}{'other'}{'misc'} :
$self->_other_rx('misc'));
my($mmm,$month,$nextprev,$last,$field_y,$field_m,$field_w,$field_d,$nth);
my($special,$got_m,$n,$got_y);
if ($string =~ $rx) {
($y,$mmm,$month,$nextprev,$last,$field_y,$field_m,$field_w,$field_d,$nth,
$special,$n) =
@+{qw(y mmm month next last field_y field_m field_w field_d
nth special n)};
if (defined($y)) {
$y = $dmt->_fix_year($y);
$got_y = 1;
return () if (! $y);
} else {
$y = $dmt->_now('y',$$noupdate);
$$noupdate = 1;
$got_y = 0;
$$self{'data'}{'def'}[0] = '';
}
if (defined($mmm)) {
$m = $$dmb{'data'}{'wordmatch'}{'month_abb'}{lc($mmm)};
$got_m = 1;
} elsif ($month) {
$m = $$dmb{'data'}{'wordmatch'}{'month_name'}{lc($month)};
$got_m = 1;
}
if ($nth) {
$nth = $$dmb{'data'}{'wordmatch'}{'nth'}{lc($nth)};
}
if ($got_m && $nth && ! $dow) {
# Dec 1st 1970
# 1st Dec 1970
# 1970 Dec 1st
# 1970 1st Dec
$d = $nth;
} elsif ($nextprev) {
my $next = 0;
my $sign = -1;
if ($$dmb{'data'}{'wordmatch'}{'nextprev'}{lc($nextprev)} == 1) {
$next = 1;
$sign = 1;
}
if ($field_y || $field_m || $field_w) {
# next/prev year/month/week
my(@delta);
if ($field_y) {
@delta = ($sign*1,0,0,0,0,0,0);
} elsif ($field_m) {
@delta = (0,$sign*1,0,0,0,0,0);
} else {
@delta = (0,0,$sign*1,0,0,0,0);
}
my @now = $dmt->_now('now',$$noupdate);
my $tz = $dmt->_now('tz');
my $isdst = $dmt->_now('isdst');
$$noupdate = 1;
my($err,$offset,$abbrev,$date2);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([@now],[@delta],0,0,$tz,$isdst);
($y,$m,$d,$h,$mn,$s) = @$date2;
} elsif ($dow) {
# next/prev friday
my @now = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
($y,$m,$d,$h,$mn,$s) = @{ $self->__next_prev(\@now,$next,$dow,0) };
$dow = 0;
} else {
return ();
}
} elsif ($last) {
if ($field_d && $got_m) {
# last day in october 95
$d = $dmb->days_in_month($y,$m);
} elsif ($dow && $got_m) {
# last friday in october 95
$d = $dmb->days_in_month($y,$m);
($y,$m,$d,$h,$mn,$s) =
@{ $self->__next_prev([$y,$m,$d,0,0,0],0,$dow,1) };
$dow = 0;
} elsif ($dow) {
# last friday in 95
($y,$m,$d,$h,$mn,$s) =
@{ $self->__next_prev([$y,12,31,0,0,0],0,$dow,0) };
} else {
return ();
}
} elsif ($nth && $dow && ! $field_w) {
if ($got_m) {
if ($of) {
# nth DoW of MMM [YYYY]
return () if ($nth > 5);
$d = 1;
($y,$m,$d,$h,$mn,$s) =
@{ $self->__next_prev([$y,$m,1,0,0,0],1,$dow,1) };
my $m2 = $m;
($y,$m2,$d) = @{ $dmb->calc_date_days([$y,$m,$d],7*($nth-1)) }
if ($nth > 1);
return () if (! $m2 || $m2 != $m);
} else {
# DoW, nth MMM [YYYY] (e.g. Sunday, 9th Dec 2008)
$d = $nth;
}
} else {
# nth DoW [in YYYY]
($y,$m,$d,$h,$mn,$s) = @{ $self->__next_prev([$y,1,1,0,0,0],1,$dow,1) };
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],7*($nth-1)) }
if ($nth > 1);
}
} elsif ($field_w && $dow) {
if (defined($n) || $nth) {
# sunday week 22 in 1996
# sunday 22nd week in 1996
$n = $nth if ($nth);
return () if (! $n);
($y,$m,$d) = @{ $dmb->week_of_year($y,$n) };
($y,$m,$d) = @{ $self->__next_prev([$y,$m,$d,0,0,0],1,$dow,1) };
} else {
# DoW week
($y,$m,$d) = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
my $tmp = $dmb->_config('firstday');
($y,$m,$d) = @{ $self->__next_prev([$y,$m,$d,0,0,0],1,$tmp,0) };
($y,$m,$d) = @{ $self->__next_prev([$y,$m,$d,0,0,0],1,$dow,1) };
}
} elsif ($nth && ! $got_y) {
# 'in one week' makes it here too, so return nothing in that case
# and let it drop through to the delta parsing.
return () if ($field_d || $field_w || $field_m || $field_y);
($y,$m,$d) = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
$d = $nth;
} elsif ($special) {
my $delta = $$dmb{'data'}{'wordmatch'}{'offset_date'}{lc($special)};
my @delta = @{ $dmb->split('delta',$delta) };
($y,$m,$d) = $dmt->_now('now',$$noupdate);
my $tz = $dmt->_now('tz');
my $isdst = $dmt->_now('isdst');
$$noupdate = 1;
my($err,$offset,$abbrev,$date2);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([$y,$m,$d,0,0,0],[@delta],0,0,$tz,$isdst);
($y,$m,$d) = @$date2;
if ($field_w) {
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],7) };
}
}
} else {
return ();
}
return($y,$m,$d,$dow);
}
# Supply defaults for missing values (Y/M/D)
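#
# Illustrative defaults (a sketch; assume 'now' is 2009-06-05):
#   _def_date(undef,undef,undef) => (2009,6,5) # everything from now
#   _def_date(1998,undef,undef)  => (1998,1,1) # truncated date
#   _def_date(undef,3,undef)     => (2009,3,1) # truncated date
#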
sub _def_date {
my($self,$y,$m,$d,$noupdate) = @_;
$y = '' if (! defined $y);
$m = '' if (! defined $m);
$d = '' if (! defined $d);
my $defined = 0;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# If year was not specified, defaults to current year.
#
# We'll also fix the year (turn 2-digit into 4-digit).
if ($y eq '') {
$y = $dmt->_now('y',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[0] = '';
} else {
$y = $dmt->_fix_year($y);
$defined = 1;
}
# If the month was not specified, but the year was, a default of
# 01 is supplied (this is a truncated date).
#
# If neither was specified, month defaults to the current month.
if ($m ne '') {
$defined = 1;
} elsif ($defined) {
$m = 1;
$$self{'data'}{'def'}[1] = 1;
} else {
$m = $dmt->_now('m',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[1] = '';
}
# If the day was not specified, but the year or month was, a default
# of 01 is supplied (this is a truncated date).
#
# If none were specified, it defaults to the current day.
if ($d ne '') {
$defined = 1;
} elsif ($defined) {
$d = 1;
$$self{'data'}{'def'}[2] = 1;
} else {
$d = $dmt->_now('d',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[2] = '';
}
return($y,$m,$d);
}
# Supply defaults for missing values (Y/DoY)
sub _def_date_doy {
my($self,$y,$doy,$noupdate) = @_;
$y = '' if (! defined $y);
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# If year was not specified, defaults to current year.
#
# We'll also fix the year (turn 2-digit into 4-digit).
if ($y eq '') {
$y = $dmt->_now('y',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[0] = '';
} else {
$y = $dmt->_fix_year($y);
}
# DoY must be specified.
my($m,$d);
my $ymd = $dmb->day_of_year($y,$doy);
return @$ymd;
}
# Supply defaults for missing values (YY/Www/D) and (Y/Www/D)
sub _def_date_dow {
my($self,$y,$w,$dow,$noupdate) = @_;
$y = '' if (! defined $y);
$w = '' if (! defined $w);
$dow = '' if (! defined $dow);
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# If year was not specified, defaults to current year.
#
# If it was specified and is a single digit, it is the
# year in the current decade.
#
# We'll also fix the year (turn 2-digit into 4-digit).
if ($y ne '') {
if (length($y) == 1) {
my $tmp = $dmt->_now('y',$$noupdate);
$tmp =~ s/.$/$y/;
$y = $tmp;
$$noupdate = 1;
} else {
$y = $dmt->_fix_year($y);
}
} else {
$y = $dmt->_now('y',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[0] = '';
}
# If week was not specified, it defaults to the current
# week. Get the first day of the week.
my($m,$d);
if ($w ne '') {
($y,$m,$d) = @{ $dmb->week_of_year($y,$w) };
} else {
my($nowy,$nowm,$nowd) = $dmt->_now('now',$$noupdate);
$$noupdate = 1;
my $noww;
($nowy,$noww) = $dmb->week_of_year([$nowy,$nowm,$nowd]);
($y,$m,$d) = @{ $dmb->week_of_year($nowy,$noww) };
}
# Handle the DoW
if ($dow eq '') {
$dow = 1;
}
my $n = $dmb->days_in_month($y,$m);
$d += ($dow-1);
if ($d > $n) {
$m++;
if ($m==13) {
$y++;
$m = 1;
}
$d = $d-$n;
}
return($y,$m,$d);
}
# Supply defaults for missing values (HH:MN:SS)
sub _def_time {
my($self,$h,$m,$s,$noupdate) = @_;
$h = '' if (! defined $h);
$m = '' if (! defined $m);
$s = '' if (! defined $s);
my $defined = 0;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# If no time was specified, defaults to 00:00:00.
if ($h eq '' &&
$m eq '' &&
$s eq '') {
$$self{'data'}{'def'}[3] = 1;
$$self{'data'}{'def'}[4] = 1;
$$self{'data'}{'def'}[5] = 1;
return(0,0,0);
}
# If hour was not specified, defaults to current hour.
if ($h ne '') {
$defined = 1;
} else {
$h = $dmt->_now('h',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[3] = '';
}
# If the minute was not specified, but the hour was, a default of
# 00 is supplied (this is a truncated time).
#
# If neither was specified, minute defaults to the current minute.
if ($m ne '') {
$defined = 1;
} elsif ($defined) {
$m = 0;
$$self{'data'}{'def'}[4] = 1;
} else {
$m = $dmt->_now('mn',$$noupdate);
$$noupdate = 1;
$$self{'data'}{'def'}[4] = '';
}
# If the second was not specified (either the hour or the minute were),
# a default of 00 is supplied (this is a truncated time).
if ($s eq '') {
$s = 0;
$$self{'data'}{'def'}[5] = 1;
}
return($h,$m,$s);
}
########################################################################
# OTHER DATE METHODS
########################################################################
# Gets the date in the parsed timezone (if $type = ''), local timezone
# (if $type = 'local') or GMT timezone (if $type = 'gmt').
#
# Gets the string value in scalar context, the split value in list
# context.
#
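# Illustrative usage (a sketch using the public API; the exact
# string format depends on configuration):
#   my $str    = $date->value();       # scalar: the joined date string
#   my @fields = $date->value();       # list: (y,m,d,h,mn,s)
#   my @gmt    = $date->value('gmt');  # same date expressed in GMT
#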
sub value {
my($self,$type) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $date;
while (1) {
if (! $$self{'data'}{'set'}) {
$$self{'err'} = '[value] Object does not contain a date';
last;
}
$type = '' if (! $type);
if ($type eq 'gmt') {
if (! @{ $$self{'data'}{'gmt'} }) {
my $zone = $$self{'data'}{'tz'};
my $date = $$self{'data'}{'date'};
if ($zone eq 'Etc/GMT') {
$$self{'data'}{'gmt'} = $date;
} else {
my $isdst = $$self{'data'}{'isdst'};
my($err,$d) = $dmt->convert_to_gmt($date,$zone,$isdst);
if ($err) {
$$self{'err'} = '[value] Unable to convert date to GMT';
last;
}
$$self{'data'}{'gmt'} = $d;
}
}
$date = $$self{'data'}{'gmt'};
} elsif ($type eq 'local') {
if (! @{ $$self{'data'}{'loc'} }) {
my $zone = $$self{'data'}{'tz'};
$date = $$self{'data'}{'date'};
my $local = $dmt->_now('tz',1);
if ($zone eq $local) {
$$self{'data'}{'loc'} = $date;
} else {
my $isdst = $$self{'data'}{'isdst'};
my($err,$d) = $dmt->convert_to_local($date,$zone,$isdst);
if ($err) {
$$self{'err'} = '[value] Unable to convert date to localtime';
last;
}
$$self{'data'}{'loc'} = $d;
}
}
$date = $$self{'data'}{'loc'};
} else {
$date = $$self{'data'}{'date'};
}
last;
}
if ($$self{'err'}) {
if (wantarray) {
return ();
} else {
return '';
}
}
if (wantarray) {
return @$date;
} else {
return $dmb->join('date',$date);
}
}
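# Illustrative usage of cmp (a sketch using the public API):
#   my $flag = $date1->cmp($date2);   # -1, 0, or 1, like Perl's cmp
# Dates in different timezones are compared via their GMT values.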
sub cmp {
my($self,$date) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [cmp] Arguments must be valid dates: date1\n";
return undef;
}
if (ref($date) ne 'Date::Manip::Date') {
warn "WARNING: [cmp] Argument must be a Date::Manip::Date object\n";
return undef;
}
if ($$date{'err'} || ! $$date{'data'}{'set'}) {
warn "WARNING: [cmp] Arguments must be valid dates: date2\n";
return undef;
}
my($d1,$d2);
if ($$self{'data'}{'tz'} eq $$date{'data'}{'tz'}) {
$d1 = $self->value();
$d2 = $date->value();
} else {
$d1 = $self->value('gmt');
$d2 = $date->value('gmt');
}
return ($d1 cmp $d2);
}
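# Illustrative usage of set (a sketch using the public API):
#   $date->set('date',[2009,6,5,12,30,0]);  # set the full date
#   $date->set('time',[14,0,0]);            # keep Y/M/D, set H:MN:S
#   $date->set('zone','America/New_York');  # same wall clock, new zone
#   $date->set('d',1);                      # set a single field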
BEGIN {
my %field = qw(y 0 m 1 d 2 h 3 mn 4 s 5);
sub set {
my($self,$field,@val) = @_;
$field = lc($field);
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Make sure $self includes a valid date (unless the entire date is
# being set, in which case it doesn't matter).
my($date,@def,$tz,$isdst);
if ($field eq 'zdate') {
# If {data}{set} = 2, we want to preserve the defaults. Also, we've
# already initialized.
#
# It is only set in the parse routines which means that this was
# called via _parse_check.
$self->_init() if ($$self{'data'}{'set'} != 2);
@def = @{ $$self{'data'}{'def'} };
} elsif ($field eq 'date') {
if ($$self{'data'}{'set'} && ! $$self{'err'}) {
$tz = $$self{'data'}{'tz'};
} else {
$tz = $dmt->_now('tz',1);
}
$self->_init();
@def = @{ $$self{'data'}{'def'} };
} else {
return 1 if ($$self{'err'} || ! $$self{'data'}{'set'});
$date = $$self{'data'}{'date'};
$tz = $$self{'data'}{'tz'};
$isdst = $$self{'data'}{'isdst'};
@def = @{ $$self{'data'}{'def'} };
$self->_init();
}
# Check the arguments
my($err,$new_tz,$new_date,$new_time);
if ($field eq 'date') {
if ($#val == 0) {
# date,DATE
$new_date = $val[0];
} elsif ($#val == 1) {
# date,DATE,ISDST
($new_date,$isdst) = @val;
} else {
$err = 1;
}
for (my $i=0; $i<=5; $i++) {
$def[$i] = 0 if ($def[$i]);
}
} elsif ($field eq 'time') {
if ($#val == 0) {
# time,TIME
$new_time = $val[0];
} elsif ($#val == 1) {
# time,TIME,ISDST
($new_time,$isdst) = @val;
} else {
$err = 1;
}
$def[3] = 0 if ($def[3]);
$def[4] = 0 if ($def[4]);
$def[5] = 0 if ($def[5]);
} elsif ($field eq 'zdate') {
if ($#val == 0) {
# zdate,DATE
$new_date = $val[0];
} elsif ($#val == 1 && ($val[1] eq '0' || $val[1] eq '1')) {
# zdate,DATE,ISDST
($new_date,$isdst) = @val;
} elsif ($#val == 1) {
# zdate,ZONE,DATE
($new_tz,$new_date) = @val;
} elsif ($#val == 2) {
# zdate,ZONE,DATE,ISDST
($new_tz,$new_date,$isdst) = @val;
} else {
$err = 1;
}
for (my $i=0; $i<=5; $i++) {
$def[$i] = 0 if ($def[$i]);
}
$tz = $dmt->_now('tz',1) if (! $new_tz);
} elsif ($field eq 'zone') {
if ($#val == -1) {
# zone
} elsif ($#val == 0 && ($val[0] eq '0' || $val[0] eq '1')) {
# zone,ISDST
$isdst = $val[0];
} elsif ($#val == 0) {
# zone,ZONE
$new_tz = $val[0];
} elsif ($#val == 1) {
# zone,ZONE,ISDST
($new_tz,$isdst) = @val;
} else {
$err = 1;
}
$tz = $dmt->_now('tz',1) if (! $new_tz);
} elsif (exists $field{$field}) {
my $i = $field{$field};
my $val;
if ($#val == 0) {
$val = $val[0];
} elsif ($#val == 1) {
($val,$isdst) = @val;
} else {
$err = 1;
}
$$date[$i] = $val;
$def[$i] = 0 if ($def[$i]);
} else {
$err = 2;
}
if ($err) {
if ($err == 1) {
$$self{'err'} = '[set] Invalid arguments';
} else {
$$self{'err'} = '[set] Invalid field';
}
return 1;
}
# Handle the arguments
if ($new_tz) {
my $tmp = $dmt->_zone($new_tz);
if ($tmp) {
# A zone/alias
$tz = $tmp;
} else {
# An offset
my ($err,@args);
push(@args,$date) if ($date);
push(@args,$new_tz);
push(@args,($isdst ? 'dstonly' : 'stdonly')) if (defined $isdst);
$tz = $dmb->zone(@args);
if (! $tz) {
$$self{'err'} = "[set] Invalid timezone argument: $new_tz";
return 1;
}
}
}
if ($new_date) {
if ($dmb->check($new_date)) {
$date = $new_date;
} else {
$$self{'err'} = '[set] Invalid date argument';
return 1;
}
}
if ($new_time) {
if ($dmb->check_time($new_time)) {
$$date[3] = $$new_time[0];
$$date[4] = $$new_time[1];
$$date[5] = $$new_time[2];
} else {
$$self{'err'} = '[set] Invalid time argument';
return 1;
}
}
# Check the date/timezone combination
my($abb,$off);
if ($tz eq 'etc/gmt') {
$abb = 'GMT';
$off = [0,0,0];
$isdst = 0;
} else {
my $per = $dmt->date_period($date,$tz,1,$isdst);
if (! $per) {
$$self{'err'} = '[set] Invalid date/timezone';
return 1;
}
$isdst = $$per[5];
$abb = $$per[4];
$off = $$per[3];
}
# Set the information
$$self{'data'}{'set'} = 1;
$$self{'data'}{'date'} = $date;
$$self{'data'}{'tz'} = $tz;
$$self{'data'}{'isdst'} = $isdst;
$$self{'data'}{'offset'}= $off;
$$self{'data'}{'abb'} = $abb;
$$self{'data'}{'def'} = [ @def ];
return 0;
}
}
########################################################################
# NEXT/PREV METHODS
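#
# Illustrative usage (a sketch using the public API; DoW 1=Monday,
# 7=Sunday):
#   $date->next(5,0);              # next Friday (today excluded)
#   $date->prev(1,1);              # previous Monday (today counts)
#   $date->next(undef,0,[12,0,0]); # next time it is 12:00:00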
sub prev {
my($self,@args) = @_;
return 1 if ($$self{'err'} || ! $$self{'data'}{'set'});
my $date = $$self{'data'}{'date'};
$date = $self->__next_prev($date,0,@args);
return 1 if (! defined($date));
$self->set('date',$date);
return 0;
}
sub next {
my($self,@args) = @_;
return 1 if ($$self{'err'} || ! $$self{'data'}{'set'});
my $date = $$self{'data'}{'date'};
$date = $self->__next_prev($date,1,@args);
return 1 if (! defined($date));
$self->set('date',$date);
return 0;
}
sub __next_prev {
my($self,$date,$next,$dow,$curr,$time) = @_;
my ($caller,$sign,$prev);
if ($next) {
$caller = 'next';
$sign = 1;
$prev = 0;
} else {
$caller = 'prev';
$sign = -1;
$prev = 1;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $orig = [ @$date ];
# Check the time (if any)
if (defined($time)) {
if ($dow) {
# $time will refer to a full [H,MN,S]
my($err,$h,$mn,$s) = $dmb->_hms_fields({ 'out' => 'list' },$time);
if ($err) {
$$self{'err'} = "[$caller] invalid time argument";
return undef;
}
$time = [$h,$mn,$s];
} else {
# $time may have leading undefs
my @tmp = @$time;
if ($#tmp != 2) {
$$self{'err'} = "[$caller] invalid time argument";
return undef;
}
my($h,$mn,$s) = @$time;
if (defined($h)) {
$mn = 0 if (! defined($mn));
$s = 0 if (! defined($s));
} elsif (defined($mn)) {
$s = 0 if (! defined($s));
} else {
$s = 0 if (! defined($s));
}
$time = [$h,$mn,$s];
}
}
# Find the next DoW
if ($dow) {
if (! $dmb->_is_int($dow,1,7)) {
$$self{'err'} = "[$caller] Invalid DOW: $dow";
return undef;
}
# Find the next/previous occurrence of DoW
my $curr_dow = $dmb->day_of_week($date);
my $adjust = 0;
if ($dow == $curr_dow) {
$adjust = 1 if ($curr == 0);
} else {
my $num;
if ($next) {
# force $dow to be more than $curr_dow
$dow += 7 if ($dow<$curr_dow);
$num = $dow - $curr_dow;
} else {
# force $dow to be less than $curr_dow
$dow -= 7 if ($dow>$curr_dow);
$num = $curr_dow - $dow;
$num *= -1;
}
# Add/subtract $num days
$date = $dmb->calc_date_days($date,$num);
}
if (defined($time)) {
my ($y,$m,$d,$h,$mn,$s) = @$date;
($h,$mn,$s) = @$time;
$date = [$y,$m,$d,$h,$mn,$s];
}
my $cmp = $dmb->cmp($orig,$date);
$adjust = 1 if ($curr == 2 && $cmp != -1*$sign);
if ($adjust) {
# Add/subtract 1 week
$date = $dmb->calc_date_days($date,$sign*7);
}
return $date;
}
# Find the next Time
if (defined($time)) {
my ($h,$mn,$s) = @$time;
my $orig = [ @$date ];
my $cmp;
if (defined $h) {
# Find next/prev HH:MN:SS
@$date[3..5] = @$time;
$cmp = $dmb->cmp($orig,$date);
if ($cmp == -1) {
if ($prev) {
$date = $dmb->calc_date_days($date,-1);
}
} elsif ($cmp == 1) {
if ($next) {
$date = $dmb->calc_date_days($date,1);
}
} else {
if (! $curr) {
$date = $dmb->calc_date_days($date,$sign);
}
}
} elsif (defined $mn) {
# Find next/prev MN:SS
@$date[4..5] = @$time[1..2];
$cmp = $dmb->cmp($orig,$date);
if ($cmp == -1) {
if ($prev) {
$date = $dmb->calc_date_time($date,[-1,0,0]);
}
} elsif ($cmp == 1) {
if ($next) {
$date = $dmb->calc_date_time($date,[1,0,0]);
}
} else {
if (! $curr) {
$date = $dmb->calc_date_time($date,[$sign,0,0]);
}
}
} else {
# Find next/prev SS
$$date[5] = $$time[2];
$cmp = $dmb->cmp($orig,$date);
if ($cmp == -1) {
if ($prev) {
$date = $dmb->calc_date_time($date,[0,-1,0]);
}
} elsif ($cmp == 1) {
if ($next) {
$date = $dmb->calc_date_time($date,[0,1,0]);
}
} else {
if (! $curr) {
$date = $dmb->calc_date_time($date,[0,$sign,0]);
}
}
}
return $date;
}
$$self{'err'} = "[$caller] Either DoW or time (or both) required";
return undef;
}
########################################################################
# CALC METHOD
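#
# Illustrative usage (a sketch using the public API):
#   my $delta = $date1->calc($date2);          # delta: date1 -> date2
#   my $d2    = $date1->calc($delta);          # date1 + delta
#   my $d3    = $date1->calc($delta,1);        # date1 - delta
#   my $del2  = $date1->calc($date2,'approx'); # approximate mode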
sub calc {
my($self,$obj,@args) = @_;
if (ref($obj) eq 'Date::Manip::Date') {
return $self->_calc_date_date($obj,@args);
} elsif (ref($obj) eq 'Date::Manip::Delta') {
return $self->_calc_date_delta($obj,@args);
} else {
return undef;
}
}
sub _calc_date_date {
my($self,$date,@args) = @_;
my $ret = $self->new_delta();
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
$$ret{'err'} = '[calc] First object invalid (date)';
return $ret;
}
if ($$date{'err'} || ! $$date{'data'}{'set'}) {
$$ret{'err'} = '[calc] Second object invalid (date)';
return $ret;
}
# Handle subtract/mode arguments
my($subtract,$mode);
if ($#args == -1) {
($subtract,$mode) = (0,'');
} elsif ($#args == 0) {
if ($args[0] eq '0' || $args[0] eq '1') {
($subtract,$mode) = ($args[0],'');
} else {
($subtract,$mode) = (0,$args[0]);
}
} elsif ($#args == 1) {
($subtract,$mode) = @args;
} else {
$$ret{'err'} = '[calc] Invalid arguments';
return $ret;
}
$mode = 'exact' if (! $mode);
if ($mode !~ /^(business|bsemi|bapprox|approx|semi|exact)$/i) {
$$ret{'err'} = '[calc] Invalid mode argument';
return $ret;
}
# if business mode
# dates must be in the same timezone
# use dates in that zone
#
# otherwise if both dates are in the same timezone && approx/semi mode
# use the dates in that zone
#
# otherwise
# convert to gmt
# use those dates
my($date1,$date2,$tz1,$isdst1,$tz2,$isdst2);
if ($mode eq 'business' || $mode eq 'bapprox' || $mode eq 'bsemi') {
if ($$self{'data'}{'tz'} eq $$date{'data'}{'tz'}) {
$date1 = [ $self->value() ];
$date2 = [ $date->value() ];
$tz1 = $$self{'data'}{'tz'};
$tz2 = $tz1;
$isdst1 = $$self{'data'}{'isdst'};
$isdst2 = $$date{'data'}{'isdst'};
} else {
$$ret{'err'} = '[calc] Dates must be in the same timezone for ' .
'business mode calculations';
return $ret;
}
} elsif (($mode eq 'approx' || $mode eq 'semi') &&
$$self{'data'}{'tz'} eq $$date{'data'}{'tz'}) {
$date1 = [ $self->value() ];
$date2 = [ $date->value() ];
$tz1 = $$self{'data'}{'tz'};
$tz2 = $tz1;
$isdst1 = $$self{'data'}{'isdst'};
$isdst2 = $$date{'data'}{'isdst'};
} else {
$date1 = [ $self->value('gmt') ];
$date2 = [ $date->value('gmt') ];
$tz1 = 'GMT';
$tz2 = $tz1;
$isdst1 = 0;
$isdst2 = 0;
}
# Do the calculation
my(@delta);
if ($subtract) {
if ($mode eq 'business' || $mode eq 'exact' || $subtract == 2) {
@delta = @{ $self->__calc_date_date($mode,$date2,$tz2,$isdst2,
$date1,$tz1,$isdst1) };
} else {
@delta = @{ $self->__calc_date_date($mode,$date1,$tz1,$isdst1,
$date2,$tz2,$isdst2) };
@delta = map { -1*$_ } @delta;
}
} else {
@delta = @{ $self->__calc_date_date($mode,$date1,$tz1,$isdst1,
$date2,$tz2,$isdst2) };
}
# Save the delta
if ($mode eq 'business' || $mode eq 'bapprox' || $mode eq 'bsemi') {
$ret->set('business',\@delta);
} else {
$ret->set('delta',\@delta);
}
return $ret;
}
sub __calc_date_date {
my($self,$mode,$date1,$tz1,$isdst1,$date2,$tz2,$isdst2) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($dy,$dm,$dw,$dd,$dh,$dmn,$ds) = (0,0,0,0,0,0,0);
if ($mode eq 'approx' || $mode eq 'bapprox') {
my($y1,$m1,$d1,$h1,$mn1,$s1) = @$date1;
my($y2,$m2,$d2,$h2,$mn2,$s2) = @$date2;
$dy = $y2-$y1;
$dm = $m2-$m1;
if ($dy || $dm) {
# If $d1 is greater than the number of days allowed in the
# month $y2/$m2, set it equal to the number of days. In other
# words:
# Jan 31 2006 to Feb 28 2008 = 2 years 1 month
#
my $dim = $dmb->days_in_month($y2,$m2);
$d1 = $dim if ($d1 > $dim);
$date1 = [$y2,$m2,$d1,$h1,$mn1,$s1];
}
}
if ($mode eq 'semi' || $mode eq 'approx') {
# Calculate the number of weeks/days apart (temporarily ignoring
# DST effects).
$dd = $dmb->days_since_1BC($date2) -
$dmb->days_since_1BC($date1);
$dw = int($dd/7);
$dd -= $dw*7;
# Adding $dd to $date1 gives: ($y2,$m2,$d2, $h1,$mn1,$s1)
# Make sure this is valid (taking into account DST effects).
# If it isn't, make it valid.
if ($dw || $dd) {
my($y1,$m1,$d1,$h1,$mn1,$s1) = @$date1;
my($y2,$m2,$d2,$h2,$mn2,$s2) = @$date2;
$date1 = [$y2,$m2,$d2,$h1,$mn1,$s1];
}
if ($dy || $dm || $dw || $dd) {
my $force = ( ($dw > 0 || $dd > 0) ? 1 : -1 );
my($off,$isdst,$abb);
($date1,$off,$isdst,$abb) =
$self->_calc_date_check_dst($date1,$tz2,$isdst2,$force);
}
}
if ($mode eq 'bsemi' || $mode eq 'bapprox') {
# Calculate the number of weeks. Ignore the days
# part. Also, since there are no DST effects, we don't
# have to check for validity.
$dd = $dmb->days_since_1BC($date2) -
$dmb->days_since_1BC($date1);
$dw = int($dd/7);
$dd = 0;
$date1 = $dmb->calc_date_days($date1,$dw*7);
}
if ($mode eq 'exact' || $mode eq 'semi' || $mode eq 'approx') {
my $sec1 = $dmb->secs_since_1970($date1);
my $sec2 = $dmb->secs_since_1970($date2);
$ds = $sec2 - $sec1;
{
no integer;
$dh = int($ds/3600);
$ds -= $dh*3600;
}
$dmn = int($ds/60);
$ds -= $dmn*60;
}
if ($mode eq 'business' || $mode eq 'bsemi' || $mode eq 'bapprox') {
# Make sure both are work days
$date1 = $self->__nextprev_business_day(0,0,1,$date1);
$date2 = $self->__nextprev_business_day(0,0,1,$date2);
my($y1,$m1,$d1,$h1,$mn1,$s1) = @$date1;
my($y2,$m2,$d2,$h2,$mn2,$s2) = @$date2;
# Find out which direction we need to move $date1 to get to $date2
my $dir = 0;
if ($y1 < $y2) {
$dir = 1;
} elsif ($y1 > $y2) {
$dir = -1;
} elsif ($m1 < $m2) {
$dir = 1;
} elsif ($m1 > $m2) {
$dir = -1;
} elsif ($d1 < $d2) {
$dir = 1;
} elsif ($d1 > $d2) {
$dir = -1;
}
# Now do the day part (to get to the same day)
$dd = 0;
while ($dir) {
($y1,$m1,$d1) = @{ $dmb->calc_date_days([$y1,$m1,$d1],$dir) };
$dd += $dir if ($self->__is_business_day([$y1,$m1,$d1,0,0,0],0));
$dir = 0 if ($y1 == $y2 && $m1 == $m2 && $d1 == $d2);
}
# Both dates are now on a business day, and during business
# hours, so do the hr/min/sec part trivially
$dh = $h2-$h1;
$dmn = $mn2-$mn1;
$ds = $s2-$s1;
}
return [ $dy,$dm,$dw,$dd,$dh,$dmn,$ds ];
}
sub _calc_date_delta {
my($self,$delta,$subtract) = @_;
my $ret = $self->new_date();
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
$$ret{'err'} = '[calc] Date object invalid';
return $ret;
}
if ($$delta{'err'}) {
$$ret{'err'} = '[calc] Delta object invalid';
return $ret;
}
# Get the date/delta fields
$subtract = 0 if (! $subtract);
my @delta = @{ $$delta{'data'}{'delta'} };
my @date = @{ $$self{'data'}{'date'} };
my $business = $$delta{'data'}{'business'};
my $tz = $$self{'data'}{'tz'};
my $isdst = $$self{'data'}{'isdst'};
my($err,$date2,$offset,$abbrev);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta([@date],[@delta],$subtract,$business,$tz,$isdst);
if ($err) {
$$ret{'err'} = '[calc] Unable to perform calculation';
} else {
$$ret{'data'}{'set'} = 1;
$$ret{'data'}{'date'} = $date2;
$$ret{'data'}{'tz'} = $tz;
$$ret{'data'}{'isdst'} = $isdst;
$$ret{'data'}{'offset'}= $offset;
$$ret{'data'}{'abb'} = $abbrev;
}
return $ret;
}
sub __calc_date_delta {
my($self,$date,$delta,$subtract,$business,$tz,$isdst) = @_;
my ($dy,$dm,$dw,$dd,$dh,$dmn,$ds) = @$delta;
my @date = @$date;
my ($err,$date2,$offset,$abbrev);
# In business mode, daylight saving time is ignored, so days are
# of a constant, known length; those are handled in the exact
# function. Otherwise, they are handled in the approximate function.
#
# Also in business mode, if $subtract = 2, then the starting date
# must be a business date or an error occurs.
my($dd_exact,$dd_approx);
if ($business) {
$dd_exact = $dd;
$dd_approx = 0;
if ($subtract == 2 && ! $self->__is_business_day($date,1)) {
return (1);
}
} else {
$dd_exact = 0;
$dd_approx = $dd;
}
if ($subtract == 2 && ($dy || $dm || $dw || $dd_approx)) {
# For subtract=2:
# DATE = RET + DELTA
#
# The delta consists of an approximate part (which is added first)
# and an exact part (added second):
# DATE = RET + DELTA(approx) + DELTA(exact)
# DATE = RET' + DELTA(exact)
# where RET' = RET + DELTA(approx)
#
# For an exact delta, subtract==2 and subtract==1 are equivalent,
# so this can be written:
# DATE - DELTA(exact) = RET'
#
# So the inverse subtract only needs to include the approximate
# portion of the delta.
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta_exact([@date],[-1*$dd_exact,-1*$dh,-1*$dmn,-1*$ds],
$business,$tz,$isdst);
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta_inverse($date2,[$dy,$dm,$dw,$dd_approx],
$business,$tz,$isdst)
if (! $err);
} else {
# We'll add the approximate part, followed by the exact part.
# After the approximate part, we need to make sure we're on
# a valid business day in business mode.
($dy,$dm,$dw,$dd_exact,$dd_approx,$dh,$dmn,$ds) =
map { -1*$_ } ($dy,$dm,$dw,$dd_exact,$dd_approx,$dh,$dmn,$ds)
if ($subtract);
@$date2 = @date;
if ($dy || $dm || $dw || $dd) {
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta_approx($date2,[$dy,$dm,$dw,$dd_approx],
$business,$tz,$isdst);
} elsif ($business) {
$date2 = $self->__nextprev_business_day(0,0,1,$date2);
}
($err,$date2,$offset,$isdst,$abbrev) =
$self->__calc_date_delta_exact($date2,[$dd_exact,$dh,$dmn,$ds],
$business,$tz,$isdst)
if (! $err && ($dd_exact || $dh || $dmn || $ds));
}
return($err,$date2,$offset,$isdst,$abbrev);
}
# Do the inverse part of a calculation.
#
# $delta = [$dy,$dm,$dw,$dd]
#
sub __calc_date_delta_inverse {
my($self,$date,$delta,$business,$tz,$isdst) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my @date2;
# Given: DATE1, DELTA
# Find: DATE2
# where DATE2 + DELTA = DATE1
#
# Start with:
# DATE2 = DATE1 - DELTA
#
# if (DATE2+DELTA < DATE1)
# while (1)
# DATE2 = DATE2 + 1 day
# if DATE2+DELTA < DATE1
# next
# elsif DATE2+DELTA > DATE1
# return ERROR
# else
# return DATE2
# done
#
# elsif (DATE2+DELTA > DATE1)
# while (1)
# DATE2 = DATE2 - 1 day
# if DATE2+DELTA > DATE1
# next
# elsif DATE2+DELTA < DATE1
# return ERROR
# else
# return DATE2
# done
#
# else
# return DATE2
if ($business) {
my $date1 = $date;
my ($err,$date2,$off,$isd,$abb,@del,$tmp,$cmp);
@del = map { $_*-1 } @$delta;
($err,$date2,$off,$isd,$abb) =
$self->__calc_date_delta_approx($date,[@del],$business,$tz,$isdst);
($err,$tmp,$off,$isd,$abb) =
$self->__calc_date_delta_approx($date2,$delta,$business,$tz,$isdst);
$cmp = $self->_cmp_date($tmp,$date1);
if ($cmp < 0) {
while (1) {
$date2 = $self->__nextprev_business_day(0,1,0,$date2);
($err,$tmp,$off,$isd,$abb) =
$self->__calc_date_delta_approx($date2,$delta,$business,$tz,$isdst);
$cmp = $self->_cmp_date($tmp,$date1);
if ($cmp < 0) {
next;
} elsif ($cmp > 0) {
return (1);
} else {
last;
}
}
} elsif ($cmp > 0) {
while (1) {
$date2 = $self->__nextprev_business_day(1,1,0,$date2);
($err,$tmp,$off,$isd,$abb) =
$self->__calc_date_delta_approx($date2,$delta,$business,$tz,$isdst);
$cmp = $self->_cmp_date($tmp,$date1);
if ($cmp > 0) {
next;
} elsif ($cmp < 0) {
return (1);
} else {
last;
}
}
}
@date2 = @$date2;
} else {
my @tmp = @$date[0..2]; # [y,m,d]
my @hms = @$date[3..5]; # [h,m,s]
my $date1 = [@tmp];
my $date2 = $dmb->_calc_date_ymwd($date1,$delta,1);
my $tmp = $dmb->_calc_date_ymwd($date2,$delta);
my $cmp = $self->_cmp_date($tmp,$date1);
if ($cmp < 0) {
while (1) {
$date2 = $dmb->calc_date_days($date2,1);
$tmp = $dmb->_calc_date_ymwd($date2,$delta);
$cmp = $self->_cmp_date($tmp,$date1);
if ($cmp < 0) {
next;
} elsif ($cmp > 0) {
return (1);
} else {
last;
}
}
} elsif ($cmp > 0) {
while (1) {
$date2 = $dmb->calc_date_days($date2,-1);
$tmp = $dmb->_calc_date_ymwd($date2,$delta);
$cmp = $self->_cmp_date($tmp,$date1);
if ($cmp > 0) {
next;
} elsif ($cmp < 0) {
return (1);
} else {
last;
}
}
}
@date2 = (@$date2,@hms);
}
# Make sure DATE2 is valid (within DST constraints) and
# return it.
my($date2,$abb,$off,$err);
($date2,$off,$isdst,$abb) = $self->_calc_date_check_dst([@date2],$tz,$isdst,0);
return (1) if (! defined($date2));
return (0,$date2,$off,$isdst,$abb);
}
sub _cmp_date {
my($self,$date0,$date1) = @_;
return ($$date0[0] <=> $$date1[0] ||
$$date0[1] <=> $$date1[1] ||
$$date0[2] <=> $$date1[2]);
}
# Do the approximate part of a calculation.
#
sub __calc_date_delta_approx {
my($self,$date,$delta,$business,$tz,$isdst) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d,$h,$mn,$s) = @$date;
my($dy,$dm,$dw,$dd) = @$delta;
#
# Do the year/month part.
#
# If we are past the last day of a month, move the date back to
# the last day of the month, e.g. Jan 31 + 1 month = Feb 28.
#
$y += $dy if ($dy);
$dmb->_mod_add(-12,$dm,\$m,\$y) # -12 means 1-12 instead of 0-11
if ($dm);
my $dim = $dmb->days_in_month($y,$m);
$d = $dim if ($d > $dim);
#
# Do the week part.
#
# The week is treated as 7 days for both business and non-business
# calculations.
#
# In a business calculation, make sure we're on a business date.
#
if ($business) {
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],$dw*7) } if ($dw);
($y,$m,$d,$h,$mn,$s) =
@{ $self->__nextprev_business_day(0,0,1,[$y,$m,$d,$h,$mn,$s]) };
} else {
$dd += $dw*7;
}
#
# Now do the day part. $dd is always 0 in business calculations.
#
if ($dd) {
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],$dd) };
}
#
# At this point, we need to make sure that we're a valid date
# (within the constraints of DST).
#
# If it is not valid in this offset, try the other one. If neither
# works, then we want the the date to be 24 hours later than the
# previous day at this time (if $dd > 0) or 24 hours earlier than
# the next day at this time (if $dd < 0). We'll use the 24 hour
# definition even for business days, but then we'll double check
# that the resulting date is a business date.
#
my $force = ( ($dd > 0 || $dw > 0 || $dm > 0 || $dy > 0) ? 1 : -1 );
my($off,$abb);
($date,$off,$isdst,$abb) =
$self->_calc_date_check_dst([$y,$m,$d,$h,$mn,$s],$tz,$isdst,$force);
return (0,$date,$off,$isdst,$abb);
}
# Do the exact part of a calculation.
#
sub __calc_date_delta_exact {
my($self,$date,$delta,$business,$tz,$isdst) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
if ($business) {
# Simplify hours/minutes/seconds where the day length is defined
# by the start/end of the business day.
my ($dd,$dh,$dmn,$ds) = @$delta;
my ($y,$m,$d,$h,$mn,$s)= @$date;
my ($hbeg,$mbeg,$sbeg) = @{ $$dmb{'data'}{'calc'}{'workdaybeg'} };
my ($hend,$mend,$send) = @{ $$dmb{'data'}{'calc'}{'workdayend'} };
my $bdlen = $$dmb{'data'}{'len'}{'bdlength'};
no integer;
my $tmp;
$ds += $dh*3600 + $dmn*60;
$tmp = int($ds/$bdlen);
$dd += $tmp;
$ds -= $tmp*$bdlen;
$dh = int($ds/3600);
$ds -= $dh*3600;
$dmn = int($ds/60);
$ds -= $dmn*60;
use integer;
if ($dd) {
my $prev = 0;
if ($dd < 1) {
$prev = 1;
$dd *= -1;
}
($y,$m,$d,$h,$mn,$s) =
@{ $self->__nextprev_business_day($prev,$dd,0,[$y,$m,$d,$h,$mn,$s]) };
}
# At this point, we're adding less than a day for the
# hours/minutes/seconds part AND we know that the current
# day is during business hours.
#
# We'll add them without affecting the day; afterwards we check
# by hand whether the result falls outside of business hours
# and, if so, roll it into the adjacent business day.
$dmb->_mod_add(60,$ds,\$s,\$mn);
$dmb->_mod_add(60,$dmn,\$mn,\$h);
$h += $dh;
# Note: it's possible that $h > 23 at this point or $h < 0
if ($h > $hend ||
($h == $hend && $mn > $mend) ||
($h == $hend && $mn == $mend && $s >= $send)) {
# We've gone past the end of the business day.
my $t2 = $dmb->calc_time_time([$h,$mn,$s],[$hend,$mend,$send],1);
while (1) {
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],1) };
last if ($self->__is_business_day([$y,$m,$d,$h,$mn,$s]));
}
($h,$mn,$s) = @{ $dmb->calc_time_time([$hbeg,$mbeg,$sbeg],$t2) };
} elsif ($h < $hbeg ||
($h == $hbeg && $mn < $mbeg) ||
($h == $hbeg && $mn == $mbeg && $s < $sbeg)) {
# We've gone back past the start of the business day.
my $t2 = $dmb->calc_time_time([$hbeg,$mbeg,$sbeg],[$h,$mn,$s],1);
while (1) {
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],-1) };
last if ($self->__is_business_day([$y,$m,$d,$h,$mn,$s]));
}
($h,$mn,$s) = @{ $dmb->calc_time_time([$hend,$mend,$send],$t2,1) };
}
# Now make sure that the date is valid within DST constraints.
my $force = ( ($dd > 0 || $dh > 0 || $dmn > 0 || $ds > 0) ? 1 : -1 );
my($off,$abb);
($date,$off,$isdst,$abb) =
$self->_calc_date_check_dst([$y,$m,$d,$h,$mn,$s],$tz,$isdst,$force);
return (0,$date,$off,$isdst,$abb);
} else {
# Convert to GTM
# Do the calculation
# Convert back
my ($dd,$dh,$dm,$ds) = @$delta; # $dd is always 0
my $del = [$dh,$dm,$ds];
my ($err,$offset,$abbrev);
($err,$date,$offset,$isdst,$abbrev) =
$dmt->_convert('__calc_date_delta_exact',$date,$tz,'GMT',$isdst);
$date = $dmb->calc_date_time($date,$del,0);
($err,$date,$offset,$isdst,$abbrev) =
$dmt->_convert('__calc_date_delta_exact',$date,'GMT',$tz,$isdst);
return($err,$date,$offset,$isdst,$abbrev);
}
}
# This checks to see which time (STD or DST) a date is in. It checks
# $isdst first, and the other value (1-$isdst) second.
#
# If the date is found in either time, it is returned.
#
# If the date is NOT found, then we got here by adding/subtracting 1 day
# from a different value, and we've obtained an invalid value. In this
# case, if $force = 0, then return nothing.
#
# If $force = 1, then go to the previous day and add 24 hours. If force
# is -1, then go to the next day and subtract 24 hours.
#
# Returns:
# ($date,$off,$isdst,$abb)
# or
# (undef)
#
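# Illustrative example: 2011-03-13 02:30:00 does not exist in
# America/New_York (clocks jump from 02:00 to 03:00), so with
# $force = 1 it is recomputed as the previous day plus 24 hours,
# giving 03:30:00 EDT.
#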
sub _calc_date_check_dst {
my($self,$date,$tz,$isdst,$force) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($abb,$off,$err);
# Try the date as is in both ISDST and 1-ISDST times
my $per = $dmt->date_period($date,$tz,1,$isdst);
if ($per) {
$abb = $$per[4];
$off = $$per[3];
return($date,$off,$isdst,$abb);
}
$per = $dmt->date_period($date,$tz,1,1-$isdst);
if ($per) {
$isdst = 1-$isdst;
$abb = $$per[4];
$off = $$per[3];
return($date,$off,$isdst,$abb);
}
# If we made it here, the date is invalid in this timezone.
# Either return undef, or add/subtract a day from the date
# and find out what time period we're in (all we care about
# is the ISDST value).
if (! $force) {
return(undef);
}
my($dd);
if ($force > 0) {
$date = $dmb->calc_date_days($date,-1);
$dd = 1;
} else {
$date = $dmb->calc_date_days($date,+1);
$dd = -1;
}
$per = $dmt->date_period($date,$tz,1,$isdst);
$isdst = (1-$isdst) if (! $per);
# Now, convert it to GMT, add/subtract 24 hours, and convert
# it back.
($err,$date,$off,$isdst,$abb) = $dmt->convert_to_gmt($date,$tz,$isdst);
$date = $dmb->calc_date_days($date,$dd);
($err,$date,$off,$isdst,$abb) = $dmt->convert_from_gmt($date,$tz);
return($date,$off,$isdst,$abb);
}
########################################################################
# MISC METHODS
sub secs_since_1970_GMT {
my($self,$secs) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
if (defined $secs) {
my $date = $dmb->secs_since_1970($secs);
my $err;
($err,$date) = $dmt->convert_from_gmt($date);
return 1 if ($err);
$self->set('date',$date);
return 0;
}
my @date = $self->value('gmt');
$secs = $dmb->secs_since_1970(\@date);
return $secs;
}
sub week_of_year {
my($self,$first) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [week_of_year] Object must contain a valid date\n";
return undef;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $date = $$self{'data'}{'date'};
my $y = $$date[0];
my($day,$dow,$doy,$f);
$doy = $dmb->day_of_year($date);
   # The date in January which must belong to the first week, and
   # its DayOfWeek.
if ($dmb->_config('jan1week1')) {
$day=1;
} else {
$day=4;
}
$dow = $dmb->day_of_week([$y,1,$day]);
# The start DayOfWeek. If $first is passed in, use it. Otherwise,
# use FirstDay.
if (! $first) {
$first = $dmb->_config('firstday');
}
# Find the pseudo-date of the first day of the first week (it may
# be negative meaning it occurs last year).
$first -= 7 if ($first > $dow);
$day -= ($dow-$first);
return 0 if ($day>$doy); # Day is in last week of previous year
return (($doy-$day)/7 + 1);
}
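# Illustrative: with the ISO 8601 style settings ('jan1week1' unset,
# $first == 1 for Monday), week 1 is the week containing January 4,
# and dates before that week return 0 (they belong to the last week
# of the previous year).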
sub complete {
my($self,$field) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [complete] Object must contain a valid date\n";
return undef;
}
if (! $field) {
return 1 if (! $$self{'data'}{'def'}[1] &&
! $$self{'data'}{'def'}[2] &&
! $$self{'data'}{'def'}[3] &&
! $$self{'data'}{'def'}[4] &&
! $$self{'data'}{'def'}[5]);
return 0;
}
if ($field eq 'm') {
return 1 if (! $$self{'data'}{'def'}[1]);
}
if ($field eq 'd') {
return 1 if (! $$self{'data'}{'def'}[2]);
}
if ($field eq 'h') {
return 1 if (! $$self{'data'}{'def'}[3]);
}
if ($field eq 'mn') {
return 1 if (! $$self{'data'}{'def'}[4]);
}
if ($field eq 's') {
return 1 if (! $$self{'data'}{'def'}[5]);
}
return 0;
}
sub convert {
my($self,$zone) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [convert] Object must contain a valid date\n";
return 1;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $zonename = $dmt->_zone($zone);
if (! $zonename) {
$$self{'err'} = "[convert] Unable to determine timezone: $zone";
return 1;
}
my $date0 = $$self{'data'}{'date'};
my $zone0 = $$self{'data'}{'tz'};
my $isdst0 = $$self{'data'}{'isdst'};
my($err,$date,$off,$isdst,$abb) = $dmt->convert($date0,$zone0,$zonename,$isdst0);
if ($err) {
$$self{'err'} = '[convert] Unable to convert date to new timezone';
return 1;
}
$self->_init();
$$self{'data'}{'date'} = $date;
$$self{'data'}{'tz'} = $zonename;
$$self{'data'}{'isdst'} = $isdst;
$$self{'data'}{'offset'} = $off;
$$self{'data'}{'abb'} = $abb;
$$self{'data'}{'set'} = 1;
return 0;
}
########################################################################
# BUSINESS DAY METHODS
sub is_business_day {
my($self,$checktime) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [is_business_day] Object must contain a valid date\n";
return undef;
}
my $date = $$self{'data'}{'date'};
return $self->__is_business_day($date,$checktime);
}
sub __is_business_day {
my($self,$date,$checktime) = @_;
my($y,$m,$d,$h,$mn,$s) = @$date;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Return 0 if it's a weekend.
my $dow = $dmb->day_of_week([$y,$m,$d]);
return 0 if ($dow < $dmb->_config('workweekbeg') ||
$dow > $dmb->_config('workweekend'));
# Return 0 if it's not during work hours (and we're checking
# for that).
if ($checktime &&
! $dmb->_config('workday24hr')) {
my $t = $dmb->join('hms',[$h,$mn,$s]);
my $t0 = $dmb->join('hms',$$dmb{'data'}{'calc'}{'workdaybeg'});
my $t1 = $dmb->join('hms',$$dmb{'data'}{'calc'}{'workdayend'});
return 0 if ($t lt $t0 || $t gt $t1);
}
# Check for holidays
$self->_holidays($y,2) unless ($$dmb{'data'}{'init_holidays'});
return 0 if (exists $$dmb{'data'}{'holidays'}{'dates'} &&
exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0} &&
exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0} &&
exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0});
return 1;
}
sub list_holidays {
my($self,$y) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
$y = $dmt->_now('y',1) if (! $y);
$self->_holidays($y,2);
my @ret;
my @m = sort { $a <=> $b } keys %{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0} };
foreach my $m (@m) {
my @d = sort { $a <=> $b } keys %{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m} };
foreach my $d (@d) {
my $hol = $self->new_date();
$hol->set('date',[$y,$m,$d,0,0,0]);
push(@ret,$hol);
}
}
return @ret;
}
sub holiday {
my($self) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [holiday] Object must contain a valid date\n";
return undef;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d) = @{ $$self{'data'}{'date'} };
$self->_holidays($y,2);
if (exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0} &&
exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0} &&
exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0}) {
my @tmp = @{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} };
if (wantarray) {
return () if (! @tmp);
return @tmp;
} else {
return '' if (! @tmp);
return $tmp[0];
}
}
return undef;
}
sub next_business_day {
my($self,$off,$checktime) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [next_business_day] Object must contain a valid date\n";
return undef;
}
my $date = $$self{'data'}{'date'};
$date = $self->__nextprev_business_day(0,$off,$checktime,$date);
$self->set('date',$date);
}
sub prev_business_day {
my($self,$off,$checktime) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [prev_business_day] Object must contain a valid date\n";
return undef;
}
my $date = $$self{'data'}{'date'};
$date = $self->__nextprev_business_day(1,$off,$checktime,$date);
$self->set('date',$date);
}
sub __nextprev_business_day {
my($self,$prev,$off,$checktime,$date) = @_;
my($y,$m,$d,$h,$mn,$s) = @$date;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Get day 0
while (! $self->__is_business_day([$y,$m,$d,$h,$mn,$s],$checktime)) {
if ($checktime) {
($y,$m,$d,$h,$mn,$s) =
@{ $self->__next_prev([$y,$m,$d,$h,$mn,$s],1,undef,0,
$$dmb{'data'}{'calc'}{'workdaybeg'}) };
} else {
# Move forward 1 day
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],1) };
}
}
# Move $off days into the future/past
while ($off > 0) {
while (1) {
if ($prev) {
# Move backward 1 day
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],-1) };
} else {
# Move forward 1 day
($y,$m,$d) = @{ $dmb->calc_date_days([$y,$m,$d],1) };
}
last if ($self->__is_business_day([$y,$m,$d,$h,$mn,$s]));
}
$off--;
}
return [$y,$m,$d,$h,$mn,$s];
}
sub nearest_business_day {
my($self,$tomorrow) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [nearest_business_day] Object must contain a valid date\n";
return undef;
}
my $date = $$self{'data'}{'date'};
$date = $self->__nearest_business_day($tomorrow,$date);
   # If $date is undef, the date is already a business day and doesn't
   # need to be changed.
return if (! defined($date));
$self->set('date',$date);
}
sub __nearest_business_day {
my($self,$tomorrow,$date) = @_;
# We're done if this is a business day
return undef if ($self->__is_business_day($date,0));
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
$tomorrow = $dmb->_config('tomorrowfirst') if (! defined $tomorrow);
my($a1,$a2);
if ($tomorrow) {
($a1,$a2) = (1,-1);
} else {
($a1,$a2) = (-1,1);
}
my ($y,$m,$d,$h,$mn,$s) = @$date;
my ($y1,$m1,$d1) = ($y,$m,$d);
my ($y2,$m2,$d2) = ($y,$m,$d);
while (1) {
($y1,$m1,$d1) = @{ $dmb->calc_date_days([$y1,$m1,$d1],$a1) };
if ($self->__is_business_day([$y1,$m1,$d1,$h,$mn,$s],0)) {
($y,$m,$d) = ($y1,$m1,$d1);
last;
}
($y2,$m2,$d2) = @{ $dmb->calc_date_days([$y2,$m2,$d2],$a2) };
if ($self->__is_business_day([$y2,$m2,$d2,$h,$mn,$s],0)) {
($y,$m,$d) = ($y2,$m2,$d2);
last;
}
}
return [$y,$m,$d,$h,$mn,$s];
}
# We need to create all the objects which will be used to determine holidays.
# By doing this once only, a lot of time is saved.
#
sub _holiday_objs {
my($self) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
$$dmb{'data'}{'holidays'}{'init'} = 1;
# Go through all of the strings from the config file.
#
my (@str) = @{ $$dmb{'data'}{'sections'}{'holidays'} };
$$dmb{'data'}{'holidays'}{'hols'} = [];
while (@str) {
my($string) = shift(@str);
my($name) = shift(@str);
# If $string is a parse_date string AND it contains a year, we'll
# store the date as a holiday, but not store the holiday description
# so it never needs to be re-parsed.
my $date = $self->new_date();
my $err = $date->parse_date($string);
if (! $err) {
if ($$date{'data'}{'def'}[0] eq '') {
push(@{ $$dmb{'data'}{'holidays'}{'hols'} },$string,$name);
} else {
my($y,$m,$d) = @{ $$date{'data'}{'date'} };
if (exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0}) {
push @{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} },$name;
} else {
$$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} = [ $name ];
}
}
next;
}
$date->err(1);
# If $string is a recurrence, we'll create a Recur object (which we
# only have to do once) and store it.
my $recur = $self->new_recur();
$recur->_holiday();
$err = $recur->parse($string);
if (! $err) {
push(@{ $$dmb{'data'}{'holidays'}{'hols'} },$recur,$name);
next;
}
$recur->err(1);
warn "WARNING: invalid holiday description: $string\n";
}
}
# Make sure that holidays are set for a given year.
#
# $$dmb{'data'}{'holidays'}{'years'}{$year} = 0 nothing done
# 1 this year done
# 2 both adjacent years done
#
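# For example (illustrative), _holidays(2011,2) parses 2011 itself and
# then calls _holidays(2010,1) and _holidays(2012,1), so holidays that
# spill across a year boundary are still seen.
#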
sub _holidays {
my($self,$year,$level) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
$self->_holiday_objs($year) if (! $$dmb{'data'}{'holidays'}{'init'});
$$dmb{'data'}{'holidays'}{'years'}{$year} = 0
if (! exists $$dmb{'data'}{'holidays'}{'years'}{$year});
my $curr_level = $$dmb{'data'}{'holidays'}{'years'}{$year};
return if ($curr_level >= $level);
$$dmb{'data'}{'holidays'}{'years'}{$year} = $level;
# Parse the year
if ($curr_level == 0) {
$self->_holidays_year($year);
return if ($level == 1);
}
# Parse the years around it.
$self->_holidays($year-1,1);
$self->_holidays($year+1,1);
}
sub _holidays_year {
my($self,$y) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Get the objects and set them to use the new year. Also, get the
# range for recurrences.
my @hol = @{ $$dmb{'data'}{'holidays'}{'hols'} };
my $beg = $self->new_date();
$beg->set('date',[$y-1,12,1,0,0,0]);
my $end = $self->new_date();
$end->set('date',[$y+1,2,1,0,0,0]);
# Get the date for each holiday.
$$dmb{'data'}{'init_holidays'} = 1;
while (@hol) {
my($obj) = shift(@hol);
my($name) = shift(@hol);
$$dmb{'data'}{'tmpnow'} = [$y,1,1,0,0,0];
if (ref($obj)) {
# It's a recurrence
# If the recurrence has a date range built in, we won't override it.
# Otherwise, we'll only look for dates in this year.
if ($obj->start() && $obj->end()) {
$obj->dates();
} else {
$obj->dates($beg,$end);
}
foreach my $i (keys %{ $$obj{'data'}{'dates'} }) {
next if ($$obj{'data'}{'saved'}{$i});
my $date = $$obj{'data'}{'dates'}{$i};
my($y,$m,$d) = @{ $$date{'data'}{'date'} };
if (exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0}) {
push @{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} },$name;
} else {
$$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} = [$name];
}
$$obj{'data'}{'saved'}{$i} = 1;
}
} else {
my $date = $self->new_date();
$date->parse_date($obj);
my($y,$m,$d) = @{ $$date{'data'}{'date'} };
if (exists $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0}) {
push @{ $$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} },$name;
} else {
$$dmb{'data'}{'holidays'}{'dates'}{$y+0}{$m+0}{$d+0} = [$name];
}
}
$$dmb{'data'}{'tmpnow'} = [];
}
$$dmb{'data'}{'init_holidays'} = 0;
}
########################################################################
# PRINTF METHOD
BEGIN {
my %pad_0 = map { $_,1 } qw ( Y m d H M S I j G W L U );
my %pad_sp = map { $_,1 } qw ( y f e k i );
my %hr = map { $_,1 } qw ( H k I i );
my %dow = map { $_,1 } qw ( v a A w );
my %num = map { $_,1 } qw ( Y m d H M S y f e k I i j G W L U );
sub printf {
my($self,@in) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [printf] Object must contain a valid date\n";
return undef;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my($y,$m,$d,$h,$mn,$s) = @{ $$self{'data'}{'date'} };
my(@out);
foreach my $in (@in) {
my $out = '';
while ($in) {
last if ($in eq '%');
# Everything up to the first '%'
if ($in =~ s/^([^%]+)//) {
$out .= $1;
next;
}
# Extended formats: %<...>
if ($in =~ s/^%<([^>]+)>//) {
my $f = $1;
my $val;
if ($f =~ /^a=([1-7])$/) {
$val = $$dmb{'data'}{'wordlist'}{'day_abb'}[$1-1];
} elsif ($f =~ /^v=([1-7])$/) {
$val = $$dmb{'data'}{'wordlist'}{'day_char'}[$1-1];
} elsif ($f =~ /^A=([1-7])$/) {
$val = $$dmb{'data'}{'wordlist'}{'day_name'}[$1-1];
} elsif ($f =~ /^p=([1-2])$/) {
$val = $$dmb{'data'}{'wordlist'}{'ampm'}[$1-1];
} elsif ($f =~ /^b=(0?[1-9]|1[0-2])$/) {
$val = $$dmb{'data'}{'wordlist'}{'month_abb'}[$1-1];
} elsif ($f =~ /^B=(0?[1-9]|1[0-2])$/) {
$val = $$dmb{'data'}{'wordlist'}{'month_name'}[$1-1];
} elsif ($f =~ /^E=(0?[1-9]|[1-4][0-9]|5[0-3])$/) {
$val = $$dmb{'data'}{'wordlist'}{'nth'}[$1-1];
} else {
$val = '%<' . $1 . '>';
}
$out .= $val;
next;
}
         # Normal one-character formats
$in =~ s/^%(.)//s;
my $f = $1;
if (exists $$self{'data'}{'f'}{$f}) {
$out .= $$self{'data'}{'f'}{$f};
next;
}
my ($val,$pad,$len,$dow);
if (exists $pad_0{$f}) {
$pad = '0';
}
if (exists $pad_sp{$f}) {
$pad = ' ';
}
if ($f eq 'G' || $f eq 'W') {
my($yy,$ww) = $dmb->_week_of_year(1,[$y,$m,$d]);
if ($f eq 'G') {
$val = $yy;
$len = 4;
} else {
$val = $ww;
$len = 2;
}
}
if ($f eq 'L' || $f eq 'U') {
my($yy,$ww) = $dmb->_week_of_year(7,[$y,$m,$d]);
if ($f eq 'L') {
$val = $yy;
$len = 4;
} else {
$val = $ww;
$len = 2;
}
}
if ($f eq 'Y' || $f eq 'y') {
$val = $y;
$len = 4;
}
if ($f eq 'm' || $f eq 'f') {
$val = $m;
$len = 2;
}
if ($f eq 'd' || $f eq 'e') {
$val = $d;
$len = 2;
}
if ($f eq 'j') {
$val = $dmb->day_of_year([$y,$m,$d]);
$len = 3;
}
if (exists $hr{$f}) {
$val = $h;
if ($f eq 'I' || $f eq 'i') {
$val -= 12 if ($val > 12);
$val = 12 if ($val == 0);
}
$len = 2;
}
if ($f eq 'M') {
$val = $mn;
$len = 2;
}
if ($f eq 'S') {
$val = $s;
$len = 2;
}
if (exists $dow{$f}) {
$dow = $dmb->day_of_week([$y,$m,$d]);
}
###
if (exists $num{$f}) {
while (length($val) < $len) {
$val = "$pad$val";
}
$val = substr($val,2,2) if ($f eq 'y');
} elsif ($f eq 'b' || $f eq 'h') {
$val = $$dmb{'data'}{'wordlist'}{'month_abb'}[$m-1];
} elsif ($f eq 'B') {
$val = $$dmb{'data'}{'wordlist'}{'month_name'}[$m-1];
} elsif ($f eq 'v') {
$val = $$dmb{'data'}{'wordlist'}{'day_char'}[$dow-1];
} elsif ($f eq 'a') {
$val = $$dmb{'data'}{'wordlist'}{'day_abb'}[$dow-1];
} elsif ($f eq 'A') {
$val = $$dmb{'data'}{'wordlist'}{'day_name'}[$dow-1];
} elsif ($f eq 'w') {
$val = $dow;
} elsif ($f eq 'p') {
my $i = ($h >= 12 ? 1 : 0);
$val = $$dmb{'data'}{'wordlist'}{'ampm'}[$i];
} elsif ($f eq 'Z') {
$val = $$self{'data'}{'abb'};
} elsif ($f eq 'N') {
my $off = $$self{'data'}{'offset'};
$val = $dmb->join('offset',$off);
} elsif ($f eq 'z') {
my $off = $$self{'data'}{'offset'};
$val = $dmb->join('offset',$off);
$val =~ s/://g;
$val =~ s/00$//;
} elsif ($f eq 'E') {
$val = $$dmb{'data'}{'wordlist'}{'nth_dom'}[$d-1];
} elsif ($f eq 's') {
$val = $self->secs_since_1970_GMT();
} elsif ($f eq 'o') {
my $date2 = $self->new_date();
$date2->parse('1970-01-01 00:00:00');
my $delta = $date2->calc($self);
$val = $delta->printf('%sys');
} elsif ($f eq 'l') {
my $d0 = $self->new_date();
my $d1 = $self->new_date();
$d0->parse('-0:6:0:0:0:0:0'); # 6 months ago
$d1->parse('+0:6:0:0:0:0:0'); # in 6 months
$d0 = $d0->value();
$d1 = $d1->value();
my $date = $self->value();
if ($date lt $d0 || $date ge $d1) {
$in = '%b %e %Y' . $in;
} else {
$in = '%b %e %H:%M' . $in;
}
$val = '';
} elsif ($f eq 'c') {
$in = '%a %b %e %H:%M:%S %Y' . $in;
$val = '';
} elsif ($f eq 'C' || $f eq 'u') {
$in = '%a %b %e %H:%M:%S %Z %Y' . $in;
$val = '';
} elsif ($f eq 'g') {
$in = '%a, %d %b %Y %H:%M:%S %Z' . $in;
$val = '';
} elsif ($f eq 'D') {
$in = '%m/%d/%y' . $in;
$val = '';
} elsif ($f eq 'r') {
$in = '%I:%M:%S %p' . $in;
$val = '';
} elsif ($f eq 'R') {
$in = '%H:%M' . $in;
$val = '';
} elsif ($f eq 'T' || $f eq 'X') {
$in = '%H:%M:%S' . $in;
$val = '';
} elsif ($f eq 'V') {
$in = '%m%d%H%M%y' . $in;
$val = '';
} elsif ($f eq 'Q') {
$in = '%Y%m%d' . $in;
$val = '';
} elsif ($f eq 'q') {
$in = '%Y%m%d%H%M%S' . $in;
$val = '';
} elsif ($f eq 'P') {
$in = '%Y%m%d%H:%M:%S' . $in;
$val = '';
} elsif ($f eq 'O') {
$in = '%Y-%m-%dT%H:%M:%S' . $in;
$val = '';
} elsif ($f eq 'F') {
$in = '%A, %B %e, %Y' . $in;
$val = '';
} elsif ($f eq 'K') {
$in = '%Y-%j' . $in;
$val = '';
} elsif ($f eq 'x') {
if ($dmb->_config('dateformat') eq 'US') {
$in = '%m/%d/%y' . $in;
} else {
$in = '%d/%m/%y' . $in;
}
$val = '';
} elsif ($f eq 'J') {
$in = '%G-W%W-%w' . $in;
$val = '';
} elsif ($f eq 'n') {
$val = "\n";
} elsif ($f eq 't') {
$val = "\t";
} else {
$val = $f;
}
if ($val ne '') {
$$self{'data'}{'f'}{$f} = $val;
$out .= $val;
}
}
push(@out,$out);
}
if (wantarray) {
return @out;
} elsif (@out == 1) {
return $out[0];
}
      return '';
}
}
########################################################################
# EVENT METHODS
sub list_events {
my($self,@args) = @_;
if ($$self{'err'} || ! $$self{'data'}{'set'}) {
warn "WARNING: [list_events] Object must contain a valid date\n";
return undef;
}
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Arguments
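   # Accepted forms (handled below, each optionally followed by the
   # string 'dates'): no argument for the object's own instant, a
   # Date::Manip::Date object for a range, or 0 for the object's
   # entire calendar day.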
my($date,$day,$format);
if (@args && $args[$#args] eq 'dates') {
pop(@args);
$format = 'dates';
} else {
$format = 'std';
}
if (@args && $#args==0 && ref($args[0]) eq 'Date::Manip::Date') {
$date = $args[0];
} elsif (@args && $#args==0 && $args[0]==0) {
$day = 1;
} elsif (@args) {
warn "ERROR: [list_events] unknown argument list\n";
return [];
}
# Get the beginning/end dates we're looking for events in
my($beg,$end);
if ($date) {
$beg = $self;
$end = $date;
} elsif ($day) {
$beg = $self->new_date();
$end = $self->new_date();
my($y,$m,$d) = $self->value();
$beg->set('date',[$y,$m,$d,0,0,0]);
$end->set('date',[$y,$m,$d,23,59,59]);
} else {
$beg = $self;
$end = $self;
}
if ($beg->cmp($end) == 1) {
my $tmp = $beg;
$beg = $end;
$end = $tmp;
}
# We need to get a list of all events which may apply.
my($y0) = $beg->value();
my($y1) = $end->value();
foreach my $y ($y0..$y1) {
$self->_events_year($y);
}
my @events = ();
foreach my $i (keys %{ $$dmb{'data'}{'events'} }) {
my $event = $$dmb{'data'}{'events'}{$i};
my $type = $$event{'type'};
my $name = $$event{'name'};
if ($type eq 'specified') {
my $d0 = $$dmb{'data'}{'events'}{$i}{'beg'};
my $d1 = $$dmb{'data'}{'events'}{$i}{'end'};
push @events,[$d0,$d1,$name];
} elsif ($type eq 'ym' || $type eq 'date') {
foreach my $y ($y0..$y1) {
if (exists $$dmb{'data'}{'events'}{$i}{$y}) {
my($d0,$d1) = @{ $$dmb{'data'}{'events'}{$i}{$y} };
push @events,[$d0,$d1,$name];
}
}
} elsif ($type eq 'recur') {
my $rec = $$dmb{'data'}{'events'}{$i}{'recur'};
my $del = $$dmb{'data'}{'events'}{$i}{'delta'};
my @d = $rec->dates($beg,$end);
foreach my $d0 (@d) {
my $d1 = $d0->calc($del);
push @events,[$d0,$d1,$name];
}
}
}
# Next we need to see which ones apply.
my @tmp;
foreach my $e (@events) {
my($d0,$d1,$name) = @$e;
push(@tmp,$e) if ($beg->cmp($d1) != 1 &&
$end->cmp($d0) != -1);
}
# Now format them...
if ($format eq 'std') {
@events = sort { $$a[0]->cmp($$b[0]) ||
$$a[1]->cmp($$b[1]) ||
$$a[2] cmp $$b[2] } @tmp;
} elsif ($format eq 'dates') {
my $p1s = $self->new_delta();
$p1s->parse('+0:0:0:0:0:0:1');
@events = ();
my (@tmp2);
foreach my $e (@tmp) {
my $name = $$e[2];
if ($$e[0]->cmp($beg) == -1) {
# Event begins before the start
push(@tmp2,[$beg,'+',$name]);
} else {
push(@tmp2,[$$e[0],'+',$name]);
}
my $d1 = $$e[1]->calc($p1s);
if ($d1->cmp($end) == -1) {
# Event ends before the end
push(@tmp2,[$d1,'-',$name]);
}
}
return () if (! @tmp2);
@tmp2 = sort { $$a[0]->cmp($$b[0]) ||
$$a[1] cmp $$b[1] ||
$$a[2] cmp $$b[2] } @tmp2;
# @tmp2 is now:
# ( [ DATE1, OP1, NAME1 ], [ DATE2, OP2, NAME2 ], ... )
# which is sorted by date.
my $d = $tmp2[0]->[0];
if ($beg->cmp($d) != 0) {
push(@events,[$beg]);
}
my %e;
while (1) {
# If the first element is the same date as we're
# currently working with, just perform the operation
# and remove it from the list. If the list is not empty,
# we'll proceed to the next element.
my $d0 = $tmp2[0]->[0];
if ($d->cmp($d0) == 0) {
my $e = shift(@tmp2);
my $op = $$e[1];
my $n = $$e[2];
if ($op eq '+') {
$e{$n} = 1;
} else {
delete $e{$n};
}
next if (@tmp2);
}
# We need to store the existing %e.
my @n = sort keys %e;
push(@events,[$d,@n]);
# If the list is empty, we're done. Otherwise, we need to
# reset the date and continue.
last if (! @tmp2);
$d = $tmp2[0]->[0];
}
}
return @events;
}
# The events of type date and ym are determined on a year-by-year basis
#
sub _events_year {
my($self,$y) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
my $tz = $dmt->_now('tz',1);
return if (exists $$dmb{'data'}{'eventyears'}{$y});
$self->_event_objs() if (! $$dmb{'data'}{'eventobjs'});
my $d = $self->new_date();
$d->config('forcedate',"${y}-01-01-00:00:00,$tz");
my $hrM1 = $d->new_delta();
$hrM1->set('delta',[0,0,0,0,0,59,59]);
my $dayM1 = $d->new_delta();
$dayM1->set('delta',[0,0,0,0,23,59,59]);
foreach my $i (keys %{ $$dmb{'data'}{'events'} }) {
my $event = $$dmb{'data'}{'events'}{$i};
my $type = $$event{'type'};
if ($type eq 'ym') {
my $beg = $$event{'beg'};
my $end = $$event{'end'};
my $d0 = $d->new_date();
$d0->parse_date($beg);
$d0->set('time',[0,0,0]);
my $d1;
if ($end) {
$d1 = $d0->new_date();
$d1->parse_date($end);
$d1->set('time',[23,59,59]);
} else {
$d1 = $d0->calc($dayM1);
}
$$dmb{'data'}{'events'}{$i}{$y} = [ $d0,$d1 ];
} elsif ($type eq 'date') {
my $beg = $$event{'beg'};
my $end = $$event{'end'};
my $del = $$event{'delta'};
my $d0 = $d->new_date();
$d0->parse($beg);
my $d1;
if ($end) {
$d1 = $d0->new_date();
$d1->parse($end);
} elsif ($del) {
$d1 = $d0->calc($del);
} else {
$d1 = $d0->calc($hrM1);
}
$$dmb{'data'}{'events'}{$i}{$y} = [ $d0,$d1 ];
}
}
}
# This parses the raw event list. It only has to be done once.
#
sub _event_objs {
my($self) = @_;
my $dmt = $$self{'tz'};
my $dmb = $$dmt{'base'};
# Only parse once.
$$dmb{'data'}{'eventobjs'} = 1;
my $hrM1 = $self->new_delta();
$hrM1->set('delta',[0,0,0,0,0,59,59]);
my $M1 = $self->new_delta();
$M1->set('delta',[0,0,0,0,0,0,-1]);
my @tmp = @{ $$dmb{'data'}{'sections'}{'events'} };
my $i = 0;
while (@tmp) {
my $string = shift(@tmp);
my $name = shift(@tmp);
my @event = split(/\s*;\s*/,$string);
if ($#event == 0) {
# YMD/YM
my $d1 = $self->new_date();
my $err = $d1->parse_date($event[0]);
if (! $err) {
if ($$d1{'data'}{'def'}[0] eq '') {
# YM
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'ym',
'name' => $name,
'beg' => $event[0] };
} else {
# YMD
my $d2 = $d1->new_date();
my ($y,$m,$d) = $d1->value();
$d1->set('time',[0,0,0]);
$d2->set('date',[$y,$m,$d,23,59,59]);
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
'name' => $name,
'beg' => $d1,
'end' => $d2 };
}
next;
}
# Date
$err = $d1->parse($event[0]);
if (! $err) {
if ($$d1{'data'}{'def'}[0] eq '') {
# Date (no year)
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'date',
'name' => $name,
'beg' => $event[0],
'delta' => $hrM1
};
} else {
# Date (year)
my $d2 = $d1->calc($hrM1);
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
'name' => $name,
'beg' => $d1,
'end' => $d2
};
}
next;
}
# Recur
my $r = $self->new_recur();
$err = $r->parse($event[0]);
if ($err) {
warn "ERROR: invalid event definition (must be Date, YMD, YM, or Recur)\n"
. " $string\n";
next;
}
my @d = $r->dates();
if (@d) {
foreach my $d (@d) {
my $d2 = $d->calc($hrM1);
               # Use the recurrence date itself as 'beg' ($d1 is only
               # the scratch object from the failed parse attempts above).
               $$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
                                                 'name' => $name,
                                                 'beg'  => $d,
                                                 'end'  => $d2
                                               };
}
} else {
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'recur',
'name' => $name,
'recur' => $r,
'delta' => $hrM1
};
}
} elsif ($#event == 1) {
my($o1,$o2) = @event;
# YMD;YMD
# YM;YM
my $d1 = $self->new_date();
my $err = $d1->parse_date($o1);
if (! $err) {
my $d2 = $self->new_date();
$err = $d2->parse_date($o2);
if ($err) {
warn "ERROR: invalid event definition (must be YMD;YMD or YM;YM)\n"
. " $string\n";
next;
} elsif ($$d1{'data'}{'def'}[0] ne $$d2{'data'}{'def'}[0]) {
warn "ERROR: invalid event definition (YMD;YM or YM;YMD not allowed)\n"
. " $string\n";
next;
}
if ($$d1{'data'}{'def'}[0] eq '') {
# YM;YM
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'ym',
'name' => $name,
'beg' => $o1,
'end' => $o2
};
} else {
# YMD;YMD
$d1->set('time',[0,0,0]);
$d2->set('time',[23,59,59]);
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
'name' => $name,
'beg' => $d1,
'end' => $d2 };
}
next;
}
# Date;Date
# Date;Delta
$err = $d1->parse($o1);
if (! $err) {
my $d2 = $self->new_date();
$err = $d2->parse($o2,'nodelta');
if (! $err) {
# Date;Date
if ($$d1{'data'}{'def'}[0] ne $$d2{'data'}{'def'}[0]) {
               warn "ERROR: invalid event definition (year must be absent or\n"
                  . "       included in both dates in Date;Date)\n"
. " $string\n";
next;
}
if ($$d1{'data'}{'def'}[0] eq '') {
# Date (no year)
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'date',
'name' => $name,
'beg' => $o1,
'end' => $o2
};
} else {
# Date (year)
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
'name' => $name,
'beg' => $d1,
'end' => $d2
};
}
next;
}
# Date;Delta
my $del = $self->new_delta();
$err = $del->parse($o2);
if ($err) {
warn "ERROR: invalid event definition (must be Date;Date or\n"
. " Date;Delta) $string\n";
next;
}
$del = $del->calc($M1);
if ($$d1{'data'}{'def'}[0] eq '') {
# Date (no year)
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'date',
'name' => $name,
'beg' => $o1,
'delta' => $del
};
} else {
# Date (year)
$d2 = $d1->calc($del);
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
'name' => $name,
'beg' => $d1,
'end' => $d2
};
}
next;
}
# Recur;Delta
my $r = $self->new_recur();
$err = $r->parse($o1);
my $del = $self->new_delta();
if (! $err) {
$err = $del->parse($o2);
}
if ($err) {
warn "ERROR: invalid event definition (must be Date;Date, YMD;YMD, "
. " YM;YM, Date;Delta, or Recur;Delta)\n"
. " $string\n";
next;
}
$del = $del->calc($M1);
my @d = $r->dates();
if (@d) {
foreach my $d1 (@d) {
my $d2 = $d1->calc($del);
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'specified',
'name' => $name,
'beg' => $d1,
'end' => $d2
};
}
} else {
$$dmb{'data'}{'events'}{$i++} = { 'type' => 'recur',
'name' => $name,
'recur' => $r,
'delta' => $del
};
}
} else {
warn "ERROR: invalid event definition\n"
. " $string\n";
next;
}
}
}
1;
# Local Variables:
# mode: cperl
# indent-tabs-mode: nil
# cperl-indent-level: 3
# cperl-continued-statement-offset: 2
# cperl-continued-brace-offset: 0
# cperl-brace-offset: 0
# cperl-brace-imaginary-offset: 0
# cperl-label-offset: 0
# End:
| 28.729379 | 88 | 0.399644 |
ed7cbf893ccc950fcb38cbd230e46de50a3a5e34 | 469 | pm | Perl | www/perl/perl-5.8.8/ext/Thread/Thread/Specific.pm | fcrwx/velocal | 2c0b08aeb2cd6375ca99556b66a6a3271e17ee4e | ["Apache-2.0"] | null | null | null | www/perl/perl-5.8.8/ext/Thread/Thread/Specific.pm | fcrwx/velocal | 2c0b08aeb2cd6375ca99556b66a6a3271e17ee4e | ["Apache-2.0"] | null | null | null | www/perl/perl-5.8.8/ext/Thread/Thread/Specific.pm | fcrwx/velocal | 2c0b08aeb2cd6375ca99556b66a6a3271e17ee4e | ["Apache-2.0"] | null | null | null |
package Thread::Specific;
our $VERSION = '1.00';
=head1 NAME
Thread::Specific - thread-specific keys
=head1 SYNOPSIS
use Thread::Specific;
my $k = key_create Thread::Specific;
=head1 DESCRIPTION
C<key_create> returns a unique thread-specific key.
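
For example (an illustrative sketch):

    my $k1 = key_create Thread::Specific;
    my $k2 = key_create Thread::Specific;
    # $k1 and $k2 are distinct keys, usable as indices into
    # per-thread storage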
=cut
sub import : locked : method {
require fields;
fields::->import(@_);
}
sub key_create : locked : method {
our %FIELDS; # suppress "used only once"
return ++$FIELDS{__MAX__};
}
1;
| 15.129032 | 51 | 0.66951 |
ed779ab8465d470e6d1997b6700d126fb9f5f806 | 733 | pm | Perl | t/lib/Mock/Inflate/Schema.pm | kentaro/p5-Teng | e12ec4ade916b0fbab23d1cc374b3703cf8d6a42 | ["Artistic-1.0-cl8"] | 1 | 2016-04-24T11:10:03.000Z | 2016-04-24T11:10:03.000Z | t/lib/Mock/Inflate/Schema.pm | kentaro/p5-Teng | e12ec4ade916b0fbab23d1cc374b3703cf8d6a42 | ["Artistic-1.0-cl8"] | null | null | null | t/lib/Mock/Inflate/Schema.pm | kentaro/p5-Teng | e12ec4ade916b0fbab23d1cc374b3703cf8d6a42 | ["Artistic-1.0-cl8"] | null | null | null |
package Mock::Inflate::Schema;
use strict;
use warnings;
use Teng::Schema::Declare;
use Mock::Inflate::Name;
table {
name 'mock_inflate';
pk 'id';
columns qw/ id name foo /;
inflate 'name' => sub {
my ($col_value) = @_;
return Mock::Inflate::Name->new(name => $col_value);
};
deflate 'name' => sub {
my ($col_value) = @_;
return ref $col_value ? $col_value->name : $col_value . '_deflate';
};
inflate qr/.+oo/ => sub {
my ($col_value) = @_;
return Mock::Inflate::Name->new(name => $col_value);
};
deflate qr/.+oo/ => sub {
my ($col_value) = @_;
return ref $col_value ? $col_value->name : $col_value . '_deflate';
};
};
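# Minimal usage sketch (illustrative; assumes a Teng model class wired
# to this schema):
#
#   my $row = $teng->single('mock_inflate', { id => 1 });
#   $row->name;  # inflated to a Mock::Inflate::Name object
#   $row->foo;   # also inflated, via the qr/.+oo/ rule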
1;
| 23.645161 | 75 | 0.545703 |
ed2c68edcfea386bbf8075e004836577660f151e | 760 | t | Perl | gnu/usr.bin/perl/cpan/Digest-SHA/t/sha1.t | ArrogantWombatics/openbsd-src | 75721e1d44322953075b7c4b89337b163a395291 | ["BSD-3-Clause"] | 1 | 2019-02-16T13:29:23.000Z | 2019-02-16T13:29:23.000Z | gnu/usr.bin/perl/cpan/Digest-SHA/t/sha1.t | ArrogantWombatics/openbsd-src | 75721e1d44322953075b7c4b89337b163a395291 | ["BSD-3-Clause"] | 1 | 2018-08-21T03:56:33.000Z | 2018-08-21T03:56:33.000Z | gnu/usr.bin/perl/cpan/Digest-SHA/t/sha1.t | ArrogantWombaticus/openbsd-src | 75721e1d44322953075b7c4b89337b163a395291 | ["BSD-3-Clause"] | null | null | null |
use strict;
my $MODULE;
BEGIN {
$MODULE = (-d "src") ? "Digest::SHA" : "Digest::SHA::PurePerl";
eval "require $MODULE" || die $@;
$MODULE->import(qw(sha1_hex));
}
BEGIN {
if ($ENV{PERL_CORE}) {
chdir 't' if -d 't';
@INC = '../lib';
}
}
my @vecs = map { eval } <DATA>;
$#vecs -= 2 if $MODULE eq "Digest::SHA::PurePerl";
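# __DATA__ holds (message, expected digest) pairs, one Perl expression
# per line; the final "a" x 1000000 vector is skipped for the pure-Perl
# module above, presumably because it is much slower there.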
my $numtests = scalar(@vecs) / 2;
print "1..$numtests\n";
for (1 .. $numtests) {
my $data = shift @vecs;
my $digest = shift @vecs;
print "not " unless sha1_hex($data) eq $digest;
print "ok ", $_, "\n";
}
__DATA__
"abc"
"a9993e364706816aba3e25717850c26c9cd0d89d"
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"
"84983e441c3bd26ebaae4aa1f95129e5e54670f1"
"a" x 1000000
"34aa973cd4c4daa4f61eeb2bdbad27316534016f"
| 20 | 64 | 0.661842 |
ed7ba2b2b65a87b9ddfff07290cc9412fe99d84f | 4,136 | pl | Perl | bin/makeembedhtml.pl | steveblackburn/videoabstracts | 34292f8540fe6bf2580f38f04c1d5b46381e9fe5 | ["Apache-2.0"] | 2 | 2019-04-04T04:15:10.000Z | 2020-03-24T16:18:29.000Z | bin/makeembedhtml.pl | steveblackburn/videoabstracts | 34292f8540fe6bf2580f38f04c1d5b46381e9fe5 | ["Apache-2.0"] | null | null | null | bin/makeembedhtml.pl | steveblackburn/videoabstracts | 34292f8540fe6bf2580f38f04c1d5b46381e9fe5 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/perl -CS
#
# Generate an embeddable html fragment for each video.
#
# The script takes the conference program as a csv exported from hotcrp
# (note that it is UTF-16, as titles and authors may contain non-ASCII characters).
# To do this: in hotcrp, search for accepted papers, then select them all,
# then download "ACM CMS csv". Then open the file in excel (or similar), then
# save as UTF-16 txt (which is tab-delimited).
#
# The glog input file is a log of google drive uploads, which exposes the
# hashes google drive assigns to each file; we need these to script the
# generation of the embed links.
# The file looks like row pairs of this form:
#
# [15-06-07 21:30:22:503 AEST] 720p/60.mp4
# [15-06-07 21:30:22:504 AEST] 0B7iRCsp7iT4xLXlEUUZ1SHRvWTg
# [15-06-07 21:30:22:505 AEST] 720p/58.mp4
# [15-06-07 21:30:22:505 AEST] 0B7iRCsp7iT4xV2dydmdBTFZLR0k
#
# The first row gives the file name (including the google drive subdirectory);
# the second row in each pair gives the hash.
#
use strict;
use File::Temp qw(tempfile);
use constant GDRIVE_FOLDER => "720p";
use constant PREFIX => "pldi15-";
use constant IDFIELD => 0;
#use constant SESSIONFIELD => 1;
#use constant TALKFIELD => 2;
use constant TITLEFIELD => 3;
use constant AUTHORFIELD => 4;
my %title;
my %authors;
my %session;
my %talknumber;
my %sessionname;
my %sessioncolor;
my %sessionday;
my %sessionstart;
my %sessiontalklen;
my %talksinsession;
my %rowid;
my $talks = shift @ARGV;
my $schedule = shift @ARGV;
my $sessions = shift @ARGV;
my $glog = shift @ARGV;
if ($talks eq "" || $schedule eq "" || $sessions eq "" || $glog eq "") {
die "usage: $0 <conf-utf16.txt> <schedule.csv> <sessions.csv> <google.log>\n";
}
if (!-e $talks) { die "Could not find $talks"; }
if (!-e $schedule) { die "Could not find $schedule"; }
if (!-e $sessions) { die "Could not find $sessions"; }
if (!-e $glog) { die "Could not find $glog"; }
getprogram($talks, $schedule, $sessions);
readglog($glog);
foreach my $s (sort { $a <=> $b} keys %sessionname) {
print $sessionday{$s}."\t$sessionname{$s}\n";
if ($talksinsession{$s}) {
my %talks = %{$talksinsession{$s}};
foreach my $t (sort { $a <=> $b } keys %talks) {
my $p = $talks{$t};
print substr($title{$p}, 0, 40)."... ($sessionname{$s})\n";
my $t = substr($title{$p}, 0, 20)."...";
print "$p\t$title{$p} (PLDI'15)\n";
print "$p\t$t\t<iframe src=\"https://docs.google.com/file/d/".$rowid{$p}."/preview\" width=\"640\" height=\"480\"></iframe>\n";
}
}
}
sub readglog {
my ($glog) = @_;
open my $gfd, '<', $glog or die "Could not open $glog";
my $id;
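  # Lines come in pairs: a "<id>.mp4" line under GDRIVE_FOLDER sets $id,
  # and the next line's token is taken as that file's google drive hash.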
while (<$gfd>) {
chomp;
if (/.mp4/) {
my $f = GDRIVE_FOLDER;
($id) = /\[.+\]\s+$f\/(\d+).mp4/;
} elsif ($id) {
my ($rid) = /\[.+\]\s+(\S+)/;
$rowid{$id} = $rid;
undef $id;
}
}
}
sub getprogram {
my ($talks, $schedule, $sessions) = @_;
# assumption that we're reading a utf16 saved from excel, after a hotcrp csv export...
open my $talksfd, '<:encoding(UTF-16)', $talks or die "Could not open $talks";
while (<$talksfd>) {
chomp;
my @lines = split /\r/;
foreach my $l (@lines) {
my @fields = split (/\t/, $l);
my ($id) = $fields[0] =~ /-(\d+)$/;
if ($id) {
$title{$id} = $fields[TITLEFIELD];
$authors{$id} = $fields[AUTHORFIELD];
}
}
}
close($talksfd);
open my $schedulefd, '<', $schedule or die "Could not open $schedule";
while (<$schedulefd>) {
chomp;
my @fields = split /,/;
my ($id,$s,$n) = @fields;
$id =~ s/.+[-]//g;
$session{$id} = $s;
$talknumber{$id} = $n;
my %talks = ();
if ($talksinsession{$s}) { %talks = %{$talksinsession{$s}}; }
$talks{$n} = $id;
$talksinsession{$s} = \%talks;
}
close ($schedulefd);
open my $sessionsfd, '<', $sessions or die "Could not open $sessions";
while (<$sessionsfd>) {
chomp;
my @fields = split /,/;
my ($id,$n,$c,$d,$s,$l) = @fields;
if ($id) {
$sessionname{$id} = $n;
$sessioncolor{$id} = $c;
$sessionday{$id} = $d;
$sessionstart{$id} = $s;
$sessiontalklen{$id} = $l;
}
}
close ($sessionsfd);
}
| 28.524138 | 133 | 0.595019 |
ed71983984e6f5bb27ab01c122249d8a1a9966f1 | 2,665 | pm | Perl | lib/CatalystX/Usul/Controller/Admin/Locks.pm | git-the-cpan/CatalystX-Usul | 838f36bddbe559a6c44410e50798d86c3b2efd54 | ["Artistic-1.0"] | null | null | null | lib/CatalystX/Usul/Controller/Admin/Locks.pm | git-the-cpan/CatalystX-Usul | 838f36bddbe559a6c44410e50798d86c3b2efd54 | ["Artistic-1.0"] | null | null | null | lib/CatalystX/Usul/Controller/Admin/Locks.pm | git-the-cpan/CatalystX-Usul | 838f36bddbe559a6c44410e50798d86c3b2efd54 | ["Artistic-1.0"] | null | null | null |
# @(#)Ident: ;
package CatalystX::Usul::Controller::Admin::Locks;
use strict;
use version; our $VERSION = qv( sprintf '0.17.%d', q$Rev: 1 $ =~ /\d+/gmx );
use CatalystX::Usul::Moose;
use CatalystX::Usul::Constants;
use CatalystX::Usul::Functions qw(throw);
BEGIN { extends q(CatalystX::Usul::Controller) }
__PACKAGE__->config( namespace => q(admin) );
sub lock_table : Chained(common) Args(0) HasActions {
my ($self, $c) = @_;
my $model = $c->model( $self->config_class );
my $lockt = $model->lock->get_table;
$model->add_field( { data => $lockt, select => q(left), type => q(table) } );
$model->group_fields( { id => q(lock_table.select) } );
$lockt->{count} > 0 and $model->add_buttons( qw(Delete) );
return;
}
sub lock_table_delete : ActionFor(lock_table.delete) {
my ($self, $c) = @_; my $s = $c->stash;
my $model = $c->model( $self->config_class );
my $selected = $model->query_array( 'table' );
$selected->[ 0 ] or throw 'Nothing selected';
for my $key (@{ $selected } ) {
$model->lock->reset( k => $key )
and $self->log->info
( 'User '.$s->{user}->username." deleted lock ${key}" );
}
return TRUE;
}
__PACKAGE__->meta->make_immutable;
1;
__END__
=pod
=head1 Name
CatalystX::Usul::Controller::Admin::Locks - Manipulate the lock table
=head1 Version
Describes v0.17.$Rev: 1 $
=head1 Synopsis
package YourApp::Controller::Admin;
use CatalystX::Usul::Moose;
BEGIN { extends q(CatalystX::Usul::Controller::Admin) }
__PACKAGE__->build_subcontrollers;
=head1 Description
Displays the lock table and allows individual locks to be selected and
deleted
=head1 Subroutines/Methods
=head2 lock_table
Display the lock table
=head2 lock_table_delete
Deletes the selected locks
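
The keys of the selected locks are read from the posted C<table> field;
each one is passed to C<< $model->lock->reset >> and the deletion is
logged against the requesting user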
=head1 Diagnostics
None
=head1 Configuration and Environment
None
=head1 Dependencies
=over 3
=item L<CatalystX::Usul::Controller>
=back
=head1 Incompatibilities
There are no known incompatibilities in this module
=head1 Bugs and Limitations
There are no known bugs in this module.
Please report problems to the address below.
Patches are welcome
=head1 Author
Peter Flanigan, C<< <Support at RoxSoft.co.uk> >>
=head1 License and Copyright
Copyright (c) 2014 Peter Flanigan. All rights reserved
This program is free software; you can redistribute it and/or modify it
under the same terms as Perl itself. See L<perlartistic>
This program is distributed in the hope that it will be useful,
but WITHOUT WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE
=cut
# Local Variables:
# mode: perl
# tab-width: 3
# End:
| 20.037594 | 80 | 0.692308 |
73d43d8f6c056ec9f2e2681bad43f7737c950a0c | 12,537 | pl | Perl | json-parsing.pl | ZykeDev/PrologJSONParser | adcd5a0ef2442fb7c15cfe139dc3a73a0e370826 | ["MIT"] | null | null | null | json-parsing.pl | ZykeDev/PrologJSONParser | adcd5a0ef2442fb7c15cfe139dc3a73a0e370826 | ["MIT"] | null | null | null | json-parsing.pl | ZykeDev/PrologJSONParser | adcd5a0ef2442fb7c15cfe139dc3a73a0e370826 | ["MIT"] | null | null | null |
%%%% -*- Mode: Prolog -*-
%%%% Author: Marco Vincenzi
%%%% Matricola: 795694
%%%% json-parsing.pl
% Parsing
json_parse(JSONString, json_obj(O)) :-
% Convert the string into a list of charcodes
string_to_list(JSONString, [H | R]),
parse_value([H | R], json_obj(O), _Rest).
json_parse([H | R], json_obj(O)) :-
parse_value([H | R], json_obj(O), _Rest).
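% Example query (illustrative):
% ?- json_parse('{"name" : "Zaphod", "heads" : 2}', O).
% O = json_obj([("name", "Zaphod"), ("heads", 2)]).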
parse(In, json_obj([(K, V) | KVs]), Rest) :-
remove_whitespaces(In, HR),
parse_pair(HR, K, V, VRest),
    % if the next char is a comma, continue parsing.
is_comma_char(VRest, CRest),
parse_members(CRest, KVs, Rest).
parse(In, json_obj([(K, V)]), Rest) :-
remove_whitespaces(In, HR),
parse_pair(HR, K, V, VRest),
% if it's a } exit
is_closed_obj_char(VRest, Rest).
parse_members(In, (K, V), Rest) :-
remove_whitespaces(In, HR),
parse_pair(HR, K, V, VRest),
% if it's a } exit
is_closed_obj_char(VRest, Rest).
parse_members(In, [(K, V) | KVs], Rest) :-
remove_whitespaces(In, HR),
parse_pair(HR, K, V, VRest),
    % if the next char is a comma, continue parsing.
is_comma_char(VRest, CRest),
parse_members(CRest, KVs, Rest).
parse_pair(HR, K, V, Rest) :-
% Parse the key
parse_key(HR, K, KRest),
% Check that there is a colon : (removes whitespaces)
is_colon_char(KRest, SCRest),
% Parse the corresponding value
parse_value(SCRest, V, Rest).
% String -> Rest, Key
parse_key([H | R], K, Rest) :-
is_DQ_char(H), !,
parse_string_DQ(R, Kchars, Rest),
string_to_list(K, Kchars).
% String -> Rest, Key
parse_key([H | R], K, Rest) :-
is_SQ_char(H), !,
parse_string_SQ(R, Kchars, Rest),
string_to_list(K, Kchars).
% Undefined value
parse_value([U | [N | [D | [E | [F | [I | [N | [E | [D | _]]]]]]]]], _, _) :-
char_code("u", U),
char_code("n", N),
char_code("d", D),
char_code("e", E),
char_code("f", F),
char_code("i", I),
!,
fail.
% String Value (DQ or SQ)
parse_value([H | R], V, Rest) :-
is_DQ_char(H),
!,
parse_string_DQ(R, VChars, Rest),
string_to_list(V, VChars),
is_value(V).
parse_value([H | R], V, Rest) :-
is_SQ_char(H),
!,
parse_string_SQ(R, VChars, Rest),
string_to_list(V, VChars),
is_value(V).
% Number Value
parse_value([H | R], V, Rest) :-
atom_char(A, H),
atom_number(A, N),
number(N),
parse_number([H | R], V, Rest),
is_value(V).
% Negative Number Value
parse_value([HD | [HN | R]], V, Rest) :-
is_dash_char(HD),
!,
atom_char(A, HN),
atom_number(A, N),
number(N),
parse_number([HD | [HN | R]], V, Rest),
is_value(V).
% Explicitly Positive Number Value
parse_value([HP | [HN | R]], V, Rest) :-
is_plus_char(HP),
!,
atom_char(A, HN),
atom_number(A, N),
number(N),
parse_number([HN | R], V, Rest),
is_value(V).
% Array Value
parse_value([H | R], json_array(V), Rest) :-
is_opened_arr_char(H),
parse_array(R, V, Rest).
% Empty Arrays []
parse_value(O, json_array([]), Rest) :-
remove_whitespaces(O, [H | R]),
is_opened_arr_char(H),
remove_whitespaces(R, [RH | Rest]),
is_closed_arr_char(RH).
% JSON Value
parse_value(O, V, Rest) :-
remove_whitespaces(O, [H | R]),
is_opened_obj_char(H),
parse(R, V, Rest).
% Empty JSON objects {}
parse_value(O, json_obj([]), Rest) :-
remove_whitespaces(O, [H | R]),
is_opened_obj_char(H),
remove_whitespaces(R, [RH | Rest]),
is_closed_obj_char(RH).
parse_array(S, [V | Vs], Rest) :-
remove_whitespaces(S, NWS),
parse_value(NWS, V, VR),
is_comma_char(VR, NWVR),
!,
parse_array(NWVR, Vs, Rest).
parse_array(S, [V], Rest) :-
remove_whitespaces(S, NWS),
parse_value(NWS, V, VR),
% Check for ]
remove_whitespaces(VR, NWVR),
is_closed_arr_char(NWVR, Rest).
% DQ
parse_string_DQ([H | R], [H | V], Rest) :-
is_not_DQ(H),
!,
parse_string_DQ(R, V, Rest).
% String end
parse_string_DQ([H | Rest], [], Rest) :-
is_DQ_char(H).
% SQ
parse_string_SQ([H | R], [H | V], Rest) :-
is_not_SQ(H),
!,
parse_string_SQ(R, V, Rest).
% String end
parse_string_SQ([H | Rest], [], Rest) :-
is_SQ_char(H).
is_digit(C) :- char_type(C, digit).
is_string([C | Cs]) :- atom(C), char_type(C, alpha), is_string(Cs).
is_string(S) :- string(S).
is_whitespace(C) :- char_type(C, white).
is_punctuation(C) :- char_type(C, punctuation).
not_is_digit(C) :- is_alpha(C).
not_is_digit(C) :- is_whitespace(C).
json_obj([]).
json_obj([M | Ms]) :-
is_member(M),
json_obj(Ms).
json_array([]).
json_array([E | Es]) :-
is_element(E),
json_array(Es).
json_array([M | Ms]) :-
is_element(M),
json_array(Ms).
is_object(X) :- json_obj(X).
is_object(X) :- json_array(X).
is_member([]).
is_member(X) :- is_pair(X).
is_member([KV | M]) :- is_pair(KV), is_member(M).
is_pair((K, V)) :- is_string(K), is_value(V).
is_value(X) :- is_string(X).
is_value(X) :- number(X).
is_value(X) :- is_object(X).
is_element([]).
is_element([E | Es]) :- is_value(E), is_element(Es).
is_whitespace_char(C) :- char_code(X, C), char_type(X, white).
is_whitespace_char(10).
is_not_whitespace_char(C) :- C \= 32, C \= 10, C \= 9. % 32 = space, 10 = LF, 9 = TAB
is_DQ_char(34).
is_SQ_char(39).
is_not_DQ(C) :- C \= 34.
is_not_SQ(C) :- C \= 39.
is_colon_char(58).
is_comma_char(44).
is_dot_char(46).
is_dot_char([C | R], R) :- is_dot_char(C).
is_dash_char(45).
is_plus_char(43).
is_opened_obj_char(123).
is_closed_obj_char(125).
is_opened_arr_char(91).
is_closed_arr_char(93).
is_closed_arr_char([H | R], R) :- is_closed_arr_char(H).
% Comma
is_comma_char([H | R], Rest) :-
is_whitespace_char(H), !,
is_comma_char(R, Rest).
is_comma_char([H | R], Rest) :-
is_comma_char(H), !,
remove_whitespaces(R, Rest).
% colon
is_colon_char([], _) :- fail.
is_colon_char(In, Rest) :-
remove_whitespaces(In, [RH | RR]),
is_colon_char(RH),
remove_whitespaces(RR, Rest).
% Opened {
is_opened_obj_char([H | R], Rest) :-
is_whitespace_char(H), !,
is_opened_obj_char(R, Rest).
is_opened_obj_char([H | R], Rest) :-
is_opened_obj_char(H), !,
remove_whitespaces(R, Rest).
% Closed }
is_closed_obj_char([H | R], Rest) :-
is_whitespace_char(H), !,
is_closed_obj_char(R, Rest).
is_closed_obj_char([H | R], R) :-
is_closed_obj_char(H).
% Remove whitespaces
remove_whitespaces([], []).
remove_whitespaces([H | R], Rest) :-
is_whitespace_char(H),
remove_whitespaces(R, Rest).
remove_whitespaces([H | R], [H | R]) :-
is_not_whitespace_char(H).
% For negative numbers
parse_number([H | R], V, Rest) :-
is_dash_char(H),
!,
parse_int(R, Vn, Rest),
V is 0 - Vn.
% For explicitly positive numbers
parse_number([H | R], V, Rest) :-
is_plus_char(H),
!,
parse_int(R, V, Rest).
% For positive numbers
parse_number([H | R], V, Rest) :-
parse_int([H | R], V, Rest).
% V is a number
parse_int([H | R], V, Rest) :-
parse_digits([H | R], Vr, Rest),
atom_chars(Va, Vr),
atom_number(Va, V),
number(V).
% eof -> fail
parse_digits([], _, _) :- fail.
% ] -> end
parse_digits([DH | DR], [], [DH | DR]) :-
is_closed_arr_char(DH).
% } -> end
parse_digits([DH | DR], [], [DH | DR]) :-
is_closed_obj_char(DH).
% ws -> end
parse_digits([DH | DR], [], DR) :-
is_whitespace_char(DH).
% Comma -> end
parse_digits([DH | DR], [], [DH | DR]) :-
is_comma_char(DH).
% Dot -> is a float
parse_digits([DH | DR], [DH | Dec], Rest) :-
is_dot_char(DH),
!,
parse_digits(DR, Dec, Rest).
% Digit -> next
parse_digits([DH | DR], V, R) :-
code_type(DH, digit),
!,
parse_digits(DR, Num, R),
append([DH], Num, V).
% If the "Fields" field is empty, return the entire obj.
json_get(V, [], V).
% json_obj -> [(K, V)].
json_get(json_obj([(K, V) | KVs]), Fields, Res) :-
json_get([(K, V) | KVs], Fields, Res).
% If the Key = FH
json_get([(K, V) | _], [FH | FR], Res) :-
% If the Key is equal to the Field.
atom_string(AK, FH),
atom_string(AK, K),
\+ number(FR),
% Go deeper in the Value with the next Field.
json_get(V, FR, Res).
% K =/= FH
json_get([(K, _V) | KVs], [FH | FR], Res) :-
% If the Key is equal to the Field
atom_string(AK, FH),
\+ atom_string(AK, K),
% Go deeper in the Value with the next Field
json_get(KVs, [FH | FR], Res).
% K = F
json_get([(K, V) | _], F, V) :-
% If the Key is equal to the (last) Field.
\+ number(F),
\+ is_list(F),
atom_string(AK, F),
atom_string(AK, K).
% K =/= F
json_get([(K, _V) | KVs], F, Res) :-
\+ number(F),
\+ is_list(F),
atom_string(AK, F),
\+ atom_string(AK, K),
json_get(KVs, F, Res).
% K = FH
% Where FH is an Array and FIndex is its Index.
json_get([(K, json_array(V)) | _KVs], [FH | [FIndex | FR]], Res) :-
atom_string(AK, FH),
atom_string(AK, K),
number(FIndex),
FIndex >= 0,
nth0(FIndex, V, VNext),
FR \= [],
!,
json_get(VNext, FR, Res).
% K = FH
% Where FH is an Array and FIndex is its Index
json_get([(K, json_array(V)) | _KVs], [FH | [FIndex | []]], Res) :-
atom_string(AK, FH),
atom_string(AK, K),
number(FIndex),
FIndex >= 0,
!,
nth0(FIndex, V, Res).
% K = FH
json_get((K, V), [FH | FR], Res) :-
% If the Key is equal to the Field.
atom_string(AK, FH),
atom_string(AK, K),
\+ number(FR),
% Go deeper in the Value with the next Field.
json_get(V, FR, Res).
% K = F
json_get((K, V), F, V) :-
% If the Key is equal to the (last) Field.
\+ number(F),
\+ is_list(F),
atom_string(AK, F),
atom_string(AK, K).
% K = FH
% Where FH is an Array and FIndex is its Index
json_get((K, json_array(V)), [FH | [FIndex | FR]], Res) :-
atom_string(AK, FH),
atom_string(AK, K),
number(FIndex),
FIndex >= 0,
nth0(FIndex, V, VNext),
FR \= [],
!,
json_get(VNext, FR, Res).
% K = FH
% Where FH is an Array and FIndex is its Index
json_get((K, json_array(V)), [FH | [FIndex | []]], Res) :-
atom_string(AK, FH),
atom_string(AK, K),
number(FIndex),
FIndex >= 0,
nth0(FIndex, V, Res).
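% Example query (illustrative):
% ?- json_parse('{"a" : [1, 2, 3]}', O), json_get(O, ["a", 1], V).
% V = 2 (array access takes a 0-based index after the field name).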
% File I/O
% Loads a text file from FileName and parses it.
json_load(FileName, JSON) :-
open(FileName, read, In),
read_stream_to_codes(In, Str),
close(In),
json_parse(Str, JSON).
% Saves a JSON object in FileName.
json_write(JSON, FileName) :-
convert_to_JSON(JSON, JSONString),
open(FileName, write, Out),
write(Out, JSONString),
close(Out).
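% Example (illustrative): round-trip a small object to disk.
% ?- json_parse('{"n" : 1}', O), json_write(O, 'out.json').
% out.json then contains {"n" : 1}.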
convert_to_JSON(json_obj([]), '{}').
convert_to_JSON(json_obj([(K, V) | KVs]), JSONString) :-
KVs \= [],
!,
convert_pair((K, V), JM),
convert_pair(KVs, JSONRest),
atom_concat("{", JM, JS),
atom_concat(JS, ", ", JS1),
atom_concat(JS1, JSONRest, JS2),
atom_concat(JS2, "}", JSONString).
convert_to_JSON(json_obj([(K, V) | []]), JSONString) :-
convert_pair((K, V), JM),
atom_concat("{", JM, JS),
atom_concat(JS, "}", JSONString).
convert_to_JSON(V, V) :-
number(V).
convert_to_JSON(V, VS) :-
is_string(V),
atom_concat('"', V, VS1),
atom_concat(VS1, '"', VS).
convert_to_JSON(json_array([V | Vs]), VS) :-
atom_concat("", '[', KH),
concat_array_elements(KH, [V | Vs], FinalV),
atom_concat(FinalV, "]", VS).
convert_pair((K, V), KV) :-
is_value(V),
convert_to_JSON(V, JV),
concat_pair(K, JV, KV).
convert_pair((K, json_obj(V)), KV) :-
convert_to_JSON(V, JV),
concat_pair(K, JV, KV).
convert_pair((K, json_array(V)), KV) :-
concat_array(K, V, KV).
convert_pair([(K, V) | []], KV) :-
convert_pair((K, V), KV).
convert_pair([(K, V) | KVs], KV) :-
convert_pair((K, V), KV1),
convert_pair(KVs, KV2),
atom_concat(KV1, ", ", KVC),
atom_concat(KVC, KV2, KV).
% Concatenates a pair (k : v).
concat_pair(K, JV, KV) :-
atom_concat('"', K, K1),
atom_concat(K1, '" : ', K2),
atom_concat(K2, JV, KV).
concat_array(K, JV, KV) :-
atom_concat('"', K, K1),
atom_concat(K1, '" : ', K2),
atom_concat(K2, '[', KH),
concat_array_elements(KH, JV, FinalV),
atom_concat(FinalV, "]", KV).
% Concatenates array elements with commas.
concat_array_elements(S, V, V1) :-
convert_to_JSON(V, VJ),
atom_concat(S, VJ, V1).
concat_array_elements(S, [VH | VR], V) :-
convert_to_JSON(VH, VHJ),
atom_concat(S, VHJ, V1),
atom_concat(V1, ", ", V2),
concat_array_elements(V2, VR, V).
% end of file -- json-parsing.pl
| 20.352273 | 77 | 0.588418 |
ed5d3c0c4971aea53b4e0f7645edc809e1bba542 | 25,468 | pm | Perl | scripts/import/SARA/GetSARA.pm | fergalmartin/ensembl-variation | 858de3ee083fd066bc0b8a78e8a449176dd51bce | ["Apache-2.0"] | null | null | null | scripts/import/SARA/GetSARA.pm | fergalmartin/ensembl-variation | 858de3ee083fd066bc0b8a78e8a449176dd51bce | ["Apache-2.0"] | 1 | 2020-04-20T12:11:56.000Z | 2020-04-20T12:11:56.000Z | scripts/import/SARA/GetSARA.pm | dglemos/ensembl-variation | 7cd20531835b45b1842476606b4fd0856e3843e0 | ["Apache-2.0"] | null | null | null |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
use strict;
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
use warnings;
#generic object for the dbSNP data. Contains the general methods to dump the data into the new Variation database. Any change in the methods
# will need to overload the corresponding method in the subclass for the species
package SARA::GetSARA;
use Bio::Index::Fastq;
use Bio::EnsEMBL::Utils::Argument qw(rearrange);
use Bio::EnsEMBL::Utils::Exception qw(throw);
use Bio::EnsEMBL::Utils::Sequence qw(reverse_comp);
use ImportUtils qw(dumpSQL debug create_and_load load);
use Time::HiRes qw(tv_interval gettimeofday);
use Getopt::Long;
# configure these before running!!!
our $ssahabuild;
our $ssaha2;
our $getseqreads;
our $ssahaSNP_cons;
our $search_read;
our $fastq_dir;
our $output_dir;
our $target_dir;
die("Can't run without variables set - please edit script") unless defined($ssahaSNP_cons) and defined($getseqreads) and defined($search_read) and defined($fastq_dir) and defined($output_dir) and defined($target_dir);
#creates the object and assign the attributes to it (connections, basically)
sub new {
my $caller = shift;
my $class = ref($caller) || $caller;
my ($dbCore, $dbVar, $dbSara, $tmp_dir, $tmp_file, $species, $source_id) =
rearrange([qw(DBCORE DBVAR DBSARA TMPDIR TMPFILE SPECIES SOURCE_ID)],@_);
return bless {'dbSara' => $dbSara,
'dbCore' => $dbCore,
'dbVar' => $dbVar, ##this is a dbconnection
'tmpdir' => $tmp_dir,
'tmpfile' => $tmp_file,
'species' => $species,
'source_id' => $source_id,
}, $class;
}
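# Typical construction (illustrative; the connection objects come from
# the surrounding import pipeline):
#
#   my $sara = SARA::GetSARA->new(-dbCore    => $dbCore,
#                                 -dbVar     => $dbVar,
#                                 -dbSara    => $dbSara,
#                                 -tmpdir    => $tmp_dir,
#                                 -tmpfile   => $tmp_file,
#                                 -species   => $species,
#                                 -source_id => $source_id);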
#main and only function in the object that dumps all dbSNP data
sub get_sara{
my $self = shift;
my $var_dbname = ($self->{'dbVar'}) ? $self->{'dbVar'}->dbname : "var_dbname";
my %rec_seq_region_id;
my $sth = $self->{'dbSara'}->prepare(qq{SELECT distinct seq_region_id
FROM $var_dbname.variation_feature
});
my ($seq_region_id);
$sth->execute();
$sth->bind_columns(\$seq_region_id);
while ($sth->fetch()) {
$rec_seq_region_id{$seq_region_id}=1;
}
  #can't build index_file for human in the lustre file system, ask Tim? (need 2-3 days), so need to use turing for the flanking_qual part of the computation
my $queue_hugemem = "-q hugemem -R'select[mem>5000] rusage[mem=5000]'";
my $queue_linux64 = "-q normal -R'select[type == LINUX64 && myens_genomics2 < 200] rusage[myens_genomics1=10]'";
my $queue_long = "-q long -M5000000 -R'select[mem>5000] rusage[mem=5000]'";
my $queue;
$queue = $queue_hugemem if $self->{'tmpdir'} =~ /turing/;
$queue = $queue_long if $self->{'tmpdir'} !~ /turing/;
#$queue = $queue_linux64 if $self->{'tmpdir'} !~ /turing/;
#my $t0 = [gettimeofday];
#debug("Time to start get_query_snp_pos $t0");
#$self->get_query_snp_pos(\%rec_seq_region_id,$var_dbname);
#my $t1 = [gettimeofday];
#debug("Time to start get_flanking_seq_qual $t1");
#$self->make_reads_file(\%rec_seq_region_id,$queue);
#$self->make_pileup_reads_file();
#$self->get_flanking_seq_qual(\%rec_seq_region_id,$var_dbname,$queue);
#print "Time to run GetSARA ", tv_interval($t0,$t1),"\n";
#$self->make_genotype_allele_table(\%rec_seq_region_id,$var_dbname,$queue);
#$self->merge_tables();
#$self->insert_allele_gtype_to_vardb(\%rec_seq_region_id,$var_dbname);
#$self->remove_faulse_variation_in_multiple_strains();
  #$self->read_coverage($var_dbname);##this has been calculated earlier
#$self->remove_empty_tables();
$self->remove_tables();
#$self->merge_table_with_same_seq_region_id();
}
sub get_query_snp_pos {
my $self = shift;
my $rec_seq_region_id = shift;
my $var_dbname = shift;
my $source_id = $self->{'source_id'};
my $tmp_dir = $self->{'tmpdir'};
my $tmp_file = $self->{'tmpfile'};
my $count;
my $species = $self->{'species'};
foreach my $seq_region_id (keys %$rec_seq_region_id) {
#foreach my $seq_region_id (226032,226034) {
my $call = "bsub -q long -R'select[myens_genomics1 < 200] rusage[myens_genomics1=10]' -J $var_dbname\_ssaha_feature_job_$seq_region_id -o $tmp_dir/ssaha_feature_out\_$seq_region_id perl parallel_sara_zm.pl -species $species -source_id $source_id -seq_region_id $seq_region_id -job snp_pos -tmpdir $tmp_dir -tmpfile $tmp_file";
$count++;
print "call is $call $count\n";
system($call);
}
my $call1 = "bsub -q normal -K -w 'done($var_dbname\_ssaha_feature_job*)' -J waiting_process sleep 1"; #waits until all variation features have finished to continue
system($call1);
}
sub get_flanking_seq_qual {
##use farm run smaller jobs or use turing run all jobs
my $self = shift;
my $rec_seq_region_id = shift;
my $var_dbname = shift;
my $queue = shift;
my $species = $self->{'species'};
my $tmp_dir = $self->{'tmpdir'};
my $tmp_file = $self->{'tmpfile'};
my $count;
my $defined_table_row = 100000;
my $reads_dir;
$reads_dir = "[reads_dir]/reads_out";
#job 1 somehow failed with memory 7 GB, so sent to turing, but only maxmem 3GB
my %rec_id_name;
#note the read_file is named by seq_region_name rather than seq_region_id, so use this
#my $sth = $self->{'dbSara'}->prepare(qq{SELECT seq_region_id,seq_region_name
# FROM $var_dbname.tmp_seq_region
# });
#my ($seq_region_id,$seq_region_name);
#$sth->execute(); $sth->bind_columns(\$seq_region_id,\$seq_region_name);
#while ($sth->fetch()) {
# $rec_id_name{$seq_region_id}=$seq_region_name;
#}
my $call;
foreach my $seq_region_id (keys %$rec_seq_region_id) {
#foreach my $seq_region_id (226032,226034) {
my $row_count_ref = $self->{'dbSara'}->db_handle->selectall_arrayref(qq{SELECT COUNT(*) FROM snp_pos_$seq_region_id});
my $row_count = $row_count_ref->[0][0];
if ($row_count > $defined_table_row) {
#my $reads_file = "$reads_dir/reads_out_$rec_id_name{$seq_region_id}";
my $reads_file = "$reads_dir/reads_out_$seq_region_id";
$call = "bsub $queue -J $var_dbname\_ssaha_flank_job_$seq_region_id -o $tmp_dir/ssaha_flank_out\_$seq_region_id perl parallel_sara_zm.pl -species $species -seq_region_id $seq_region_id -job flank -reads_file $reads_file -tmpdir $tmp_dir -tmpfile $tmp_file";
#$call = "bsub $queue -J $var_dbname\_ssaha_flank_job_$seq_region_id -o $tmp_dir/ssaha_flank_out\_$seq_region_id perl parallel_sara_feature.pl -species $species -seq_region_id $seq_region_id -job flank -index_file $index_file -tmpdir $tmp_dir -tmpfile $tmp_file";
}
else {
my $reads_file = "$reads_dir/reads_out_small";
$call = "bsub $queue -J $var_dbname\_ssaha_flank_job_$seq_region_id -o $tmp_dir/ssaha_flank_out\_$seq_region_id perl parallel_sara_zm.pl -species $species -seq_region_id $seq_region_id -job flank -reads_file $reads_file -tmpdir $tmp_dir -tmpfile $tmp_file";
}
$count++;
print "call is $call $count\n";
system($call);
sleep(5);
}
my $call1 = "bsub -q normal -K -w 'done($var_dbname\_ssaha_flank_job*)' -J waiting_process sleep 1"; #waits until all variation features have finished to continue
system($call1);
}
sub make_reads_file {
##use turing to run all seq_region_ids
my $self = shift;
my $rec_seq_region_id = shift;
my $queue = shift;
my $tmp_dir = $self->{'tmpdir'};
my $tmp_file = $self->{'tmpfile'};
my $reads_dir = "[reads_dir]";
my $defined_table_row = 100000;
my (@big_seq_region_id,@small_seq_region_id);
foreach my $seq_region_id (keys %$rec_seq_region_id) {
#foreach my $seq_region_id (226052,226062,226046) {
my $row_count_ref = $self->{'dbSara'}->db_handle->selectall_arrayref(qq{SELECT COUNT(*) FROM snp_pos_$seq_region_id});
my $row_count = $row_count_ref->[0][0];
if ($row_count > $defined_table_row) {
push @big_seq_region_id,$seq_region_id;
}
else {
push @small_seq_region_id, $seq_region_id;
}
}
if ($tmp_dir !~ /turing/) {
die "search_read job has to run on turing\n";
}
foreach my $seq_region_id (@big_seq_region_id) {
#foreach my $seq_region_id (226053) {
debug("Dumping reads names for $seq_region_id...");
dumpSQL($self->{'dbSara'},qq{SELECT DISTINCT query_name FROM snp_pos_$seq_region_id});
system("sort $tmp_dir/$tmp_file |uniq >$reads_dir/reads_dir/reads_name_$seq_region_id");
#my $seq_region_id="missed";
###needs 50GB memory to run search_read (for any size of reads_name)
system("bsub -q hugemem -R'select[mem>50000] rusage[mem=50000]' -J reads_file_$seq_region_id -o $reads_dir/reads_dir/out_reads_file_$seq_region_id $getseqreads $reads_dir/reads_dir/reads_name_$seq_region_id $reads_dir/genome-reads.fastq $reads_dir/reads_dir/reads_out_$seq_region_id");
if ($? == -1) {
print "failed to execute: $!\n";
}
elsif ($? & 127) {
printf "child died with signal %d, %s coredump\n",
($? & 127), ($? & 128) ? 'with' : 'without';
}
else {
printf "child exited with value %d\n", $? >> 8;
}
}
foreach my $seq_region_id (@small_seq_region_id) {
debug("Dumping reads names for $seq_region_id...");
dumpSQL($self->{'dbSara'},qq{SELECT query_name FROM snp_pos_$seq_region_id});
system("sort $tmp_dir/$tmp_file |uniq >>$reads_dir/reads_dir/reads_name_small");
}
###needs 50GB memory to run search_read (for any size of reads_name)
system("bsub -q hugemem -R'select[mem>50000] rusage[mem=50000]' -J reads_file_small -o $reads_dir/reads_dir/out_reads_file_small $getseqreads $reads_dir/reads_dir/reads_name_small $reads_dir/genome-reads.fastq $reads_dir/reads_dir/reads_out_small");
if ($? == -1) {
print "failed to execute: $!\n";
}
elsif ($? & 127) {
printf "child died with signal %d, %s coredump\n",
($? & 127), ($? & 128) ? 'with' : 'without';
}
else {
printf "child exited with value %d\n", $? >> 8;
}
my $call1 = "bsub -q hugemem -R'select[mem>100] rusage[mem=100]' -K -w 'done(reads_file*)' -J waiting_process sleep 1"; #waits until all variation features have finished to continue
system($call1);
system("scp $reads_dir/reads_dir/reads_out_* [reads_out_dir]");
#return ("$reads_dir/reads_dir/reads_out_$seq_region_id$t\_000.fastq");
}
sub make_pileup_reads_file {
##use turing to run all chromosomes
my $self = shift;
my $tmp_dir = $self->{'tmpdir'};
my $tmp_file = $self->{'tmpfile'};
opendir DIR, "$output_dir" or die "Failed to open dir : $!";
my @reads_dirs = grep /dir$/, readdir(DIR);
print "files are @reads_dirs\n";
#foreach my $read_dir (@reads_dirs) {
foreach my $read_dir("1_dir") {
my ($chr) = $read_dir =~ /(\S+)\_dir/;
debug("chr is $chr Get reads fastq for $read_dir...");
###needs 60GB memory to run search_read (for any size of reads_name)
system("bsub -q hugemem -R'select[mem>60000] rusage[mem=60000]' -J reads_file_$read_dir -o $output_dir/$read_dir/out_reads_file_$chr $search_read -fastq 1 $output_dir/$read_dir/$chr\_read_name $output_dir/$read_dir/reads_out_$chr $fastq_dir/readname.tag $fastq_dir/*fastq");
if ($? == -1) {
print "failed to execute: $!\n";
}
elsif ($? & 127) {
printf "child died with signal %d, %s coredump\n",
($? & 127), ($? & 128) ? 'with' : 'without';
}
else {
printf "child exited with value %d\n", $? >> 8;
}
debug("Running pileup SNP...");
if (! -e "$output_dir/$read_dir/reads_out_$chr\.fastq") {
system("cat $output_dir/$read_dir/reads_out_$chr\_*.fastq >$output_dir/$read_dir/reads_out_$chr\.fastq");
}
system("bsub -q hugemem -M20000000 -R'select[mem>20000] rusage[mem=20000]' -o $output_dir/$read_dir/out_$chr\_SNP $ssahaSNP_cons $output_dir/$read_dir/$chr\_align $output_dir/$read_dir/$chr\_cigar $target_dir/$chr\.fa $output_dir/$read_dir/reads_out_$chr\.fastq");
}
}
sub make_genotype_allele_table {
##use the farm to run these jobs
my $self = shift;
my $rec_seq_region_id = shift;
my $var_dbname = shift;
my $queue = shift;
my $tmp_dir = $self->{'tmpdir'};
my $tmp_file = $self->{'tmpfile'};
my $count;
my $species = $self->{'species'};
foreach my $seq_region_id (keys %$rec_seq_region_id) {
#foreach my $seq_region_id (226055) {
my $call = "bsub -q normal -R'select[myens_genomics1 < 200] rusage[myens_genomics1=10]' -J $var_dbname\_ssaha_gtype_job_$seq_region_id -o $tmp_dir/ssaha_gtype_out\_$seq_region_id perl parallel_sara_zm.pl -species $species -seq_region_id $seq_region_id -job gtype_allele -tmpdir $tmp_dir -tmpfile $tmp_file";
$count++;
print "call is $call $count\n";
system($call);
}
my $call1 = "bsub -q normal -K -w 'done($var_dbname\_ssaha_gtype_job*)' -J waiting_process sleep 1"; #waits until all variation features have finished to continue
system($call1);
}
sub insert_allele_gtype_to_vardb {
my ($self, $rec_seq_region_id, $var_dbname) = @_;
$self->{'dbVar'}->do(qq{ALTER TABLE allele add unique index unique_allele_idx(variation_id,allele,sample_id)});
$self->{'dbVar'}->do(qq{CREATE TABLE tmp_individual_genotype_single_bp (
variation_id int not null,allele_1 varchar(255),allele_2 varchar(255),sample_id int,
key variation_idx(variation_id),
key sample_idx(sample_id)
) MAX_ROWS = 100000000}
);
$self->{'dbVar'}->do(qq{CREATE UNIQUE INDEX ind_genotype_idx ON tmp_individual_genotype_single_bp(variation_id,sample_id,allele_1,allele_2)});
debug("Insert individual allele first...");
$self->{'dbSara'}->do(qq{INSERT IGNORE INTO $var_dbname.allele (variation_id,allele,sample_id) select ga.variation_id,ga.allele,ip.population_sample_id from gtype_allele ga, $var_dbname.sample s, $var_dbname.individual_population ip where ga.sample_name = s.name and s.sample_id = ip.individual_sample_id});
debug("Then insert reference allele...");
$self->{'dbSara'}->do(qq{INSERT IGNORE INTO $var_dbname.allele (variation_id,allele,sample_id) select ga.variation_id,ga.allele,s.sample_id from gtype_allele ga, $var_dbname.sample s where ga.sample_name = s.name and s.name like "ENS%"}) ;
debug("insert into tmp_genotype table...");
$self->{'dbSara'}->do(qq{INSERT IGNORE INTO $var_dbname.tmp_individual_genotype_single_bp
(variation_id,allele_1,allele_2,sample_id) select ig.variation_id,ig.allele_1,ig.allele_2,s.sample_id from tmp_individual_genotype_single_bp ig, $var_dbname.sample s where ig.sample_name = s.name});
$self->{'dbVar'}->do(qq{DROP INDEX unique_allele_idx ON allele});
$self->{'dbVar'}->do(qq{DROP INDEX ind_genotype_idx ON tmp_individual_genotype_single_bp});
#delete entries from the variation, variation_feature and flanking_sequence tables whose variation_id is not in the allele and tmp_individual_genotype_single_bp tables
#$self->{'dbSara'}->do(qq{CREATE TABLE variation_old SELECT * FROM $var_dbname.variation});
#$self->{'dbSara'}->do(qq{CREATE TABLE variation_feature_old SELECT * FROM $var_dbname.variation_feature});
#$self->{'dbSara'}->do(qq{CREATE TABLE flanking_sequence_old SELECT * FROM $var_dbname.flanking_sequence});
$self->{'dbVar'}->do(qq{CREATE TABLE uniq_var_id_tmp_gtype SELECT DISTINCT variation_id FROM tmp_individual_genotype_single_bp});
$self->{'dbVar'}->do(qq{ALTER TABLE uniq_var_id_tmp_gtype ADD INDEX variation_idx(variation_id)});
debug("delete from variation table...");
$self->{'dbVar'}->do(qq{DELETE FROM v USING variation v LEFT JOIN uniq_var_id_tmp_gtype u ON v.variation_id = u.variation_id
WHERE u.variation_id IS NULL
});
debug("delete from variation_feature table...");
$self->{'dbVar'}->do(qq{DELETE FROM vf USING variation_feature vf LEFT JOIN uniq_var_id_tmp_gtype u ON vf.variation_id = u.variation_id
WHERE u.variation_id IS NULL
});
debug("delete from flanking_sequence table...");
$self->{'dbVar'}->do(qq{DELETE FROM f USING flanking_sequence f LEFT JOIN uniq_var_id_tmp_gtype u ON f.variation_id = u.variation_id
WHERE u.variation_id IS NULL
});
}
sub remove_faulse_variation_in_multiple_strains {
my $self = shift;
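#How the helper tables below are built (single-base alleles assumed):
#  varid_remove      - group_concat collapses each strain's genotype into one
#                      comma-separated string per variation, e.g. "A,A,A"
#  varid_remove1     - keeps variations where the allele_1 and allele_2 strings
#                      are identical, i.e. homozygous in every strain
#  varid_same_as_ref - keeps those whose alleles (string offsets 1,3,5 covering
#                      3, 2 or 1 strains) all match the reference allele in
#                      variation_feature.allele_string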
$self->{'dbVar'}->do(qq{CREATE TABLE varid_remove
SELECT t.variation_id,group_concat(t.allele_1) as allele_1,group_concat(t.allele_2) as allele_2
FROM tmp_individual_genotype_single_bp t
GROUP BY variation_id
});
$self->{'dbVar'}->do(qq{CREATE TABLE varid_remove1
SELECT * FROM varid_remove
WHERE allele_1=allele_2
});
$self->{'dbVar'}->do(qq{CREATE TABLE varid_same_as_ref
SELECT DISTINCT v.variation_id from varid_remove1 v, variation_feature vf
WHERE substring(v.allele_1,1,1) like substring(vf.allele_string,1,1)
AND substring(v.allele_1,3,1) like substring(vf.allele_string,1,1)
AND substring(v.allele_1,5,1) like substring(vf.allele_string,1,1)
AND v.variation_id=vf.variation_id
AND length(v.allele_1)=5
});
$self->{'dbVar'}->do(qq{INSERT INTO varid_same_as_ref
SELECT DISTINCT v.variation_id from varid_remove1 v, variation_feature vf
WHERE substring(v.allele_1,1,1) like substring(vf.allele_string,1,1)
AND substring(v.allele_1,3,1) like substring(vf.allele_string,1,1)
AND v.variation_id=vf.variation_id
AND length(v.allele_1)=3
});
$self->{'dbVar'}->do(qq{INSERT INTO varid_same_as_ref
SELECT DISTINCT v.variation_id from varid_remove1 v, variation_feature vf
WHERE substring(v.allele_1,1,1) like substring(vf.allele_string,1,1)
AND v.variation_id=vf.variation_id
AND length(v.allele_1)=1
});
$self->{'dbVar'}->do(qq{ALTER TABLE varid_same_as_ref ADD INDEX variation_id(variation_id)});
#remove these variation_id from variation/variation_feature/flanking_sequence/allele/tmp_gtype tables
debug("delete from variation table...");
$self->{'dbVar'}->do(qq{DELETE FROM v USING variation v, varid_same_as_ref u
WHERE v.variation_id = u.variation_id
});
debug("delete from variation_feature table...");
$self->{'dbVar'}->do(qq{DELETE FROM vf USING variation_feature vf, varid_same_as_ref u
WHERE vf.variation_id = u.variation_id
});
debug("delete from flanking_sequence table...");
$self->{'dbVar'}->do(qq{DELETE FROM f USING flanking_sequence f, varid_same_as_ref u
WHERE f.variation_id = u.variation_id
});
debug("delete from allele table...");
$self->{'dbVar'}->do(qq{DELETE FROM a USING allele a, varid_same_as_ref u
WHERE a.variation_id = u.variation_id
});
debug("delete from tmp_individual_genotype_single_bp table...");
$self->{'dbVar'}->do(qq{DELETE FROM t USING tmp_individual_genotype_single_bp t, varid_same_as_ref u
WHERE t.variation_id = u.variation_id
});
debug("Drop tables varid_remove varid_remove1 varid_same_as_ref");
#$self->{'dbVar'}->do(qq{DROP TABLES varid_remove varid_remove1 varid_same_as_ref});
}
sub read_coverage {
#not used here
my $self = shift;
my $var_dbname = shift;
my $tmp_dir = $self->{'tmpdir'};
my $tmp_file = $self->{'tmpfile'};
my $species = $self->{'species'};
my $alignment_file ="[alignment_file]";
my $sth = $self->{'dbCore'}->prepare(qq{SELECT sr.seq_region_id,sr.name
FROM seq_region_attrib sra, attrib_type at, seq_region sr
WHERE sra.attrib_type_id=at.attrib_type_id
AND at.code="toplevel"
AND sr.seq_region_id = sra.seq_region_id
});
my ($seq_region_id,$seq_region_name,%rec_seq_region);
$sth->execute();
$sth->bind_columns(\$seq_region_id,\$seq_region_name);
while ($sth->fetch()) {
$rec_seq_region{$seq_region_id}=$seq_region_name;
}
foreach my $seq_region_id (keys %rec_seq_region) {
my $seq_region_name = $rec_seq_region{$seq_region_id};
my $call = "bsub -q bigmem -R'select[mem>3000] rusage[mem=3000]' -J $var_dbname\_read_coverage_job_$seq_region_id -o /$tmp_dir/read_coverage_out\_$seq_region_id perl parallel_sara_feature.pl -species $species -seq_region_name $seq_region_name -job read_coverage -alignment_file $alignment_file -tmpdir $tmp_dir -tmpfile $tmp_file";
print "call is $call\n";
system($call);
}
my $call1 = "bsub -q normal -K -w 'done($var_dbname\_read_coverage_job*)' -J waiting_process sleep 1"; #waits until all variation features have finished to continue
system($call1);
}
sub merge_tables {
my ($self) = @_;
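#For each logical table this collects the per-seq_region tables, recreates an
#empty table with the same structure, then converts it into a MERGE table that
#unions them (insert_method=last routes new rows to the last underlying table)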
#foreach my $table ("combine_feature","snp_pos","flanking_qual","gtype_allele","failed_flanking_qual","failed_gtype","tmp_individual_genotype_single_bp") {
foreach my $table ("gtype_allele","failed_gtype","tmp_individual_genotype_single_bp") {
debug("Merge table $table...");
my $single_table_ref = $self->{'dbSara'}->db_handle->selectall_arrayref(qq{SHOW tables like "$table\_%"});
my @tables = map {$_->[0] } @$single_table_ref;
foreach my $t(@tables) {
if ($t eq $table) {
$self->{'dbSara'}->do(qq{DROP TABLE $table});
}
}
$self->{'dbSara'}->do(qq{CREATE TABLE $table like $tables[0]});
my $table_names = join ',',@tables;
#print "table_names is $table_names\n";
$self->{'dbSara'}->do(qq{ALTER TABLE $table engine = merge union($table_names) insert_method=last});
}
}
sub remove_tables {
my ($self) = @_;
#foreach my $table ("combine_feature","snp_pos","failed_flanking_qual","flanking_qual","gtype_allele","failed_gtype","tmp_individual_genotype_single_bp") {
foreach my $table ("gtype_allele","failed_gtype","tmp_individual_genotype_single_bp") {
my $single_table_ref = $self->{'dbSara'}->db_handle->selectall_arrayref(qq{SHOW tables like "$table%"});
my @tables = map {$_->[0] } @$single_table_ref;
my $table_names = join ',',@tables;
print "table_names is $table_names\n";
$self->{'dbSara'}->do(qq{DROP TABLES $table_names});
}
}
sub remove_empty_tables {
my ($self) = @_;
#foreach my $table ("combine_feature","snp_pos","failed_flanking_qual","flanking_qual","gtype_allele","failed_gtype","tmp_individual_genotype_single_bp") {
foreach my $table ("flanking_qual","failed_flanking_qual") {
my $single_table_ref = $self->{'dbSara'}->db_handle->selectall_arrayref(qq{SHOW tables like "$table%"});
my @tables = map {$_->[0] } @$single_table_ref;
my @empty_tables;
foreach my $table (@tables) {
my $table_row_ref = $self->{'dbSara'}->db_handle->selectall_arrayref(qq{SELECT COUNT(*) FROM $table});
#print "table is $table\n";
my $table_row = $table_row_ref->[0][0];
if (! $table_row) {
push @empty_tables, $table;
}
}
my $table_names = join ',',@empty_tables;
print "empty_table_names is $table_names\n";
$self->{'dbSara'}->do(qq{DROP TABLES $table_names});
}
}
sub merge_table_with_same_seq_region_id {
my ($self) = @_;
#foreach my $table ("combine_feature","snp_pos","flanking_qual","gtype_allele","failed_qual","failed_gtype","tmp_individual_genotype_single_bp") {
foreach my $table ("flanking_qual","failed_qual") {
my $single_table_ref = $self->{'dbSara'}->db_handle->selectall_arrayref(qq{SHOW tables like "$table\_%"});
my @tables = map {$_->[0] } @$single_table_ref;
my $main_tab_name;
foreach my $tab (@tables) {
if ($tab =~ /(.*\_.*).*\_(\d+)(\_\d+)$/) {
$main_tab_name = $1.$3;
$self->{'dbSara'}->do(qq{CREATE TABLE IF NOT EXISTS $main_tab_name like $tab});
print "table is $tab and main table is $main_tab_name\n";
#$self->{'dbSara'}->do(qq{INSERT INTO $main_tab_name select * from $tab});
#$self->{'dbSara'}->do(qq{DROP TABLE $tab});
}
}
}
}
1;
| 44.602452 | 335 | 0.668879 |
73e314218025e62637f128dfa2aad4797939b350
| 9,865 |
t
|
Perl
|
S06-advanced/dispatching.t
|
usev6/roast
|
8e61eb8757c26e99e32d4ba5274dae07606c99b4
|
[
"Artistic-2.0"
] | 99 |
2015-03-03T13:01:44.000Z
|
2020-03-05T15:21:43.000Z
|
S06-advanced/dispatching.t
|
usev6/roast
|
8e61eb8757c26e99e32d4ba5274dae07606c99b4
|
[
"Artistic-2.0"
] | 331 |
2015-02-17T15:26:22.000Z
|
2020-03-16T18:29:49.000Z
|
S06-advanced/dispatching.t
|
usev6/roast
|
8e61eb8757c26e99e32d4ba5274dae07606c99b4
|
[
"Artistic-2.0"
] | 136 |
2015-02-02T13:34:10.000Z
|
2020-02-18T02:26:59.000Z
|
use v6;
use Test;
use soft;
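# 'soft' disables routine inlining so the run-time .wrap calls below take effect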
plan 7;
subtest 'Basic interaction of nextwith/nextsame with multi and method dispatch' => {
my class Foo {
method foo($v) {
$v * 2
}
}
my class Bar is Foo {
multi method foo(Int $v) {
nextsame
}
multi method foo(Str $v) {
nextwith $v.Int
}
}
my $obj = Bar.new;
#?rakudo.jvm 2 todo 'Dispatching needs more work on the JVM backend'
is $obj.foo(21), 42, "Int is dispatched";
is $obj.foo("11"), 22, "Str is dispatched";
}
subtest 'Args to callwith in wrapper are used by enclosing multi and method dispatch' => {
my class C1 {
method m($x) {
"C1: $x"
}
}
my class C2 is C1 {
multi method m(Any $x) {
"C2/Any: $x\n" ~ callsame()
}
multi method m(Int $x) {
"C2/Int: $x\n" ~ callsame()
}
}
C2.^lookup('m').candidates[1].wrap: -> \s, $x {
"Wrapper: $x\n" ~ callwith(s, $x + 1)
}
#?rakudo.jvm 2 todo 'Dispatching needs more work on the JVM backend'
is C2.m(1), "Wrapper: 1\nC2/Int: 2\nC2/Any: 2\nC1: 2", 'First call';
is C2.m(1), "Wrapper: 1\nC2/Int: 2\nC2/Any: 2\nC1: 2", 'Second call';
}
subtest 'Args to callwith in multi are used by enclosing method dispatch' => {
my class C1 {
method m($x) {
"C1: $x"
}
}
my class C2 is C1 {
multi method m(Any $x) {
"C2/Any: $x\n" ~ callsame()
}
multi method m(Int $x) {
"C2/Int: $x\n" ~ callwith($x + 1)
}
}
C2.^lookup('m').candidates[1].wrap: -> \s, $x {
"Wrapper: $x\n" ~ callwith(s, $x + 1)
}
#?rakudo.jvm 2 todo 'Dispatching needs more work on the JVM backend'
is C2.m(1), "Wrapper: 1\nC2/Int: 2\nC2/Any: 3\nC1: 3", 'First call';
is C2.m(1), "Wrapper: 1\nC2/Int: 2\nC2/Any: 3\nC1: 3", 'Second call';
}
#?rakudo skip 'Various cases of wrap not yet supported'
#?DOES 1
{
subtest "Dispatcher Chain" => {
plan 14;
my @order;
my class C1 {
method foo(|) { @order.push: ::?CLASS.^name }
}
my class C2 is C1 {
proto method foo(|) {*}
multi method foo(Str $s) {
@order.push: ::?CLASS.^name ~ "(Str)";
nextsame;
}
multi method foo(Int $s) {
@order.push: ::?CLASS.^name ~ "(Int)";
nextsame;
}
multi method foo(Num) {
@order.push: ::?CLASS.^name ~ "(Num)";
nextsame
}
}
my class C3 is C2 {
method foo(|) {
@order.push: ::?CLASS.^name;
nextsame
}
}
my class C4 is C3 {
proto method foo(|) {*}
multi method foo(Int:D $v) {
@order.push: ::?CLASS.^name ~ "(Int:D)";
nextwith ~$v
}
multi method foo(Any) {
@order.push: ::?CLASS.^name ~ "(Any)";
callsame
}
}
my $inst;
$inst = C3.new;
$inst.foo("bar");
is-deeply @order.List, <C3 C2(Str) C1>, "a multi-method doesn't break MRO dispatching";
@order = [];
$inst.foo(42);
is-deeply @order.List, <C3 C2(Int) C1>, "a multi-method dispatching works correctly";
$inst = C4.new;
@order = [];
$inst.foo("baz");
is-deeply @order.List, <C4(Any) C3 C2(Str) C1>, "multi being the first method in MRO still works";
@order = [];
$inst.foo(13);
is-deeply @order.List, <C4(Int:D) C4(Any) C3 C2(Str) C1>, "nextwith does what's expected";
my \proto := C2.^find_method('foo', :local, :no_fallback);
nok proto.is-wrapped, "proto is not wrapped yet";
my $wh1 = proto.wrap(my method foo-wrap(|) { @order.push: "foo-proto"; nextsame });
ok proto.is-wrapped, "proto is wrapped now";
@order = [];
$inst.foo("");
is-deeply @order.List, <C4(Any) C3 foo-proto C2(Str) C1>, "proto can be wrapped";
proto.unwrap($wh1);
@order = [];
$inst.foo("");
is-deeply @order.List, <C4(Any) C3 C2(Str) C1>, "proto can be unwrapped";
nok proto.is-wrapped, "proto is in unwrapped state";
# This should be the foo(Num) candidate
my \cand = proto.candidates[2];
# Note that next* can't be used with blocks.
$wh1 = cand.wrap(-> *@ { @order.push('foo-num-wrap'); callsame });
@order = [];
$inst.foo(pi);
is-deeply @order.List, <C4(Any) C3 foo-num-wrap C2(Num) C1>, "we can wrap a candidate";
# We can even wrap a candidate with another multi. It works!
proto multi-wrap(|) {*}
multi multi-wrap(\SELF, Num) {
@order.push: "multi-wrap(Num)";
nextsame
}
multi multi-wrap(\SELF, Any) {
@order.push: "multi-wrap(Any)";
nextsame
}
my $wh2 = cand.wrap(&multi-wrap);
@order = [];
$inst.foo(pi);
is-deeply @order.List, <C4(Any) C3 multi-wrap(Num) multi-wrap(Any) foo-num-wrap C2(Num) C1>, "we can use a multi as a wrapper of a candidate";
cand.unwrap($wh1);
@order = [];
$inst.foo(pi);
is-deeply @order.List, <C4(Any) C3 multi-wrap(Num) multi-wrap(Any) C2(Num) C1>, "we can unwrap a multi";
# Even nastier thing: wrap a candidate of our wrapper!
my $wwh = &multi-wrap.candidates[1].wrap(sub wrap-wrapper(|) { @order.push: 'cand-wrap'; nextsame });
@order = [];
$inst.foo(pi);
is-deeply @order.List, <C4(Any) C3 multi-wrap(Num) cand-wrap multi-wrap(Any) C2(Num) C1>, "we can wrap a candidate of the wrapper multi";
# Unwrap the method candidate from the second wrapper. We then get the original behavior.
cand.unwrap($wh2);
@order = [];
$inst.foo(pi);
is-deeply @order.List, <C4(Any) C3 C2(Num) C1>, "unwrapping the candidate restores the original dispatch chain";
}
}
#?rakudo skip 'Various cases of wrap not yet supported'
#?DOES 1
{
subtest "Regression: nextcallee" => {
plan 2;
my @order;
my class C1 {
method foo(|) {
@order.push: ::?CLASS.^name
}
}
my class C2 is C1 {
method foo(|args) {
@order.push: ::?CLASS.^name;
my &callee = nextcallee;
self.&callee(|args)
}
}
my class C3 is C2 {
method foo(|args) {
@order.push: ::?CLASS.^name;
nextsame
}
}
my $inst = C3.new;
@order = [];
$inst.foo;
is-deeply @order.List, <C3 C2 C1>, "checkpoint";
C2.^find_method('foo', :no_fallback, :local)
.wrap(
sub (|args) {
@order.push: 'C2::foo::wrapper';
my &callee = nextcallee;
&callee(|args)
});
@order = [];
$inst.foo;
is-deeply @order.List, <C3 C2::foo::wrapper C2 C1>, "nextcallee doesn't break the dispatcher chain";
}
}
subtest "Regression: broken chain" => {
plan 2;
my @order;
my class C1 {
multi method foo {
@order.push: "C1::foo";
$.bar;
}
proto method bar(|) {*}
multi method bar {
@order.push: "C1::bar";
nextsame
}
}
my class C2 is C1 {
proto method bar(|) {*}
multi method bar {
@order.push: "C2::bar";
nextsame
}
method foo {
@order.push: "C2::foo";
nextsame;
}
}
my $inst = C2.new;
$inst.bar;
#?rakudo.jvm todo 'Dispatching needs more work on the JVM backend'
is-deeply @order.List, <C2::bar C1::bar>, "control: multi dispatches as expected";
@order = [];
$inst.foo;
#?rakudo.jvm todo 'Dispatching needs more work on the JVM backend'
is-deeply @order.List, <C2::foo C1::foo C2::bar C1::bar>, "multi-dispatch is not broken";
}
#?rakudo skip 'Various cases of wrap not yet supported'
#?DOES 1
{
# GH Raku/problem-solving#170
subtest "Wrap parent's first multi-candidate" => {
plan 3;
my @order;
my $inst;
my class C1 {
method foo(|) {
@order.push: 'C1::foo'
}
}
my class C2 is C1 {
proto method foo(|) {*}
multi method foo(Int) {
@order.push: 'C2::foo(Int)';
nextsame;
}
multi method foo(Any) {
@order.push: 'C2::foo(Any)';
nextsame;
}
}
my class C3 is C2 {
method foo(|) {
@order.push: 'C3::foo';
nextsame
}
}
my @orig-order = <C3::foo C2::foo(Int) C2::foo(Any) C1::foo>;
$inst = C3.new;
$inst.foo(42);
is-deeply @order, @orig-order, "control: multi-dispatch as expected";
my $wh = C2.^lookup('foo').candidates[0].wrap(
-> | {
@order.push: "C2::foo::wrapper";
callsame
}
);
@order = [];
$inst.foo(42);
is-deeply
@order.List,
<C3::foo C2::foo::wrapper C2::foo(Int) C2::foo(Any) C1::foo>,
"wrapping of the first candidate doesn't break the chain";
$wh.restore;
@order = [];
$inst.foo(42);
is-deeply @order, @orig-order, "unwrapping of the candidate restores the order";
}
}
done-testing;
# vim: expandtab shiftwidth=4
| 29.186391 | 150 | 0.484643 |
ed606cccef6e9bbc0d5e8fd26607ced6523789fa
| 30,768 |
pm
|
Perl
|
modules/Bio/EnsEMBL/Map/DBSQL/DitagFeatureAdaptor.pm
|
duartemolha/ensembl
|
378db18977278a066bd54b41e7afcf1db05d7db3
|
[
"Apache-2.0"
] | null | null | null |
modules/Bio/EnsEMBL/Map/DBSQL/DitagFeatureAdaptor.pm
|
duartemolha/ensembl
|
378db18977278a066bd54b41e7afcf1db05d7db3
|
[
"Apache-2.0"
] | null | null | null |
modules/Bio/EnsEMBL/Map/DBSQL/DitagFeatureAdaptor.pm
|
duartemolha/ensembl
|
378db18977278a066bd54b41e7afcf1db05d7db3
|
[
"Apache-2.0"
] | null | null | null |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2019] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::Map::DBSQL::DitagFeatureAdaptor
=head1 SYNOPSIS
my $dfa = $db->get_DitagFeatureAdaptor;
my $ditagFeatures = $dfa->fetch_all_by_Slice( $slice, "SME005" );
foreach my $ditagFeature (@$ditagFeatures) {
print $ditagFeature->ditag_id . " "
. $ditagFeature->slice . " "
. $ditagFeature->start . "-"
. $ditagFeature->end . " "
. $ditagFeature->strand;
}
=head1 DESCRIPTION
Provides database interaction for the Bio::EnsEMBL::Map::DitagFeature
object
=head1 METHODS
=cut
package Bio::EnsEMBL::Map::DBSQL::DitagFeatureAdaptor;
use strict;
use vars ('@ISA');
use Bio::EnsEMBL::Map::Ditag;
use Bio::EnsEMBL::Map::DitagFeature;
use Bio::EnsEMBL::DBSQL::BaseAdaptor;
use Bio::EnsEMBL::Utils::Exception qw( throw warning );
@ISA = qw(Bio::EnsEMBL::DBSQL::BaseAdaptor);
=head2 fetch_all
Arg [1] : none
Example : @all_tags = @{$ditagfeature_adaptor->fetch_all};
Description: Retrieves all ditagFeatures from the database;
Usually not a good idea, use fetch_all_by_Slice instead.
Returntype : listref of Bio::EnsEMBL::Map::DitagFeature
Caller : general
Status : At Risk
=cut
sub fetch_all {
my $self = shift;
my $sth = $self->prepare("SELECT df.ditag_feature_id, df.ditag_id, df.seq_region_id,
df.seq_region_start, df.seq_region_end, df.seq_region_strand,
df.analysis_id, df.hit_start, df.hit_end, df.hit_strand,
df.cigar_line, df.ditag_side, df.ditag_pair_id, d.tag_count
FROM ditag_feature df, ditag d
WHERE df.ditag_id=d.ditag_id" );
$sth->execute;
my $result = $self->_fetch($sth);
return $result;
}
=head2 fetch_by_dbID
Arg [1] : ditagFeature dbID
Example    : my $tag = $ditagfeature_adaptor->fetch_by_dbID($my_id);
Description: Retrieves a ditagFeature from the database.
Returntype : Bio::EnsEMBL::Map::DitagFeature
Caller : general
Status : At Risk
=cut
sub fetch_by_dbID {
my ($self, $dbid) = @_;
my $sth = $self->prepare("SELECT df.ditag_feature_id, df.ditag_id, df.seq_region_id,
df.seq_region_start, df.seq_region_end, df.seq_region_strand,
df.analysis_id, df.hit_start, df.hit_end, df.hit_strand,
df.cigar_line, df.ditag_side, df.ditag_pair_id, d.tag_count
FROM ditag_feature df, ditag d
WHERE df.ditag_id=d.ditag_id AND df.ditag_feature_id = ?" );
$sth->execute($dbid);
my $result = $self->_fetch($sth);
return $result->[0];
}
=head2 fetch_all_by_ditagID
Arg [1] : ditag dbID
Arg [2] : (optional) ditag-pair dbID
Arg [3] : (optional) analysis ID
Example    : @my_tags = @{$ditagfeature_adaptor->fetch_all_by_ditagID($my_id)};
Description: Retrieves all ditagFeatures from the database linking to a specific ditag-id
Returntype : listref of Bio::EnsEMBL::Map::DitagFeature
Caller : general
Status : At Risk
=cut
sub fetch_all_by_ditagID {
my ($self, $ditag_id, $ditag_pair_id, $analysis_id) = @_;
my $arg = $ditag_id;
my $sql = "SELECT df.ditag_feature_id, df.ditag_id, df.seq_region_id,
df.seq_region_start, df.seq_region_end, df.seq_region_strand,
df.analysis_id, df.hit_start, df.hit_end, df.hit_strand,
df.cigar_line, df.ditag_side, df.ditag_pair_id, d.tag_count
FROM ditag_feature df, ditag d
WHERE df.ditag_id=d.ditag_id AND df.ditag_id = ? ";
if($ditag_pair_id){
$sql .= "AND df.ditag_pair_id = ? ";
$arg .= ", $ditag_pair_id";
}
if($analysis_id){
$sql .= "AND df.analysis_id = ? ";
$arg .= ", $analysis_id";
}
$sql .= "ORDER BY df.ditag_pair_id";
my $sth = $self->prepare($sql);
$sth->execute(split(",",$arg));
my $result = $self->_fetch($sth);
return $result;
}
=head2 fetch_all_by_type
Arg [1] : ditag type
Example : @my_tags = @{$ditagfeature_adaptor->fetch_all_by_type($type)};
Description: Retrieves all ditagFeatures from the database linking to a specific ditag-type
Returntype : listref of Bio::EnsEMBL::Map::DitagFeature
Caller : general
Status : At Risk
=cut
sub fetch_all_by_type {
my ($self, $ditag_type) = @_;
my $sth = $self->prepare("SELECT df.ditag_feature_id, df.ditag_id, df.seq_region_id,
df.seq_region_start, df.seq_region_end, df.seq_region_strand,
df.analysis_id, df.hit_start, df.hit_end, df.hit_strand,
df.cigar_line, df.ditag_side, df.ditag_pair_id, d.tag_count
FROM ditag_feature df, ditag d
WHERE df.ditag_id=d.ditag_id AND d.type = ?
ORDER BY df.ditag_id, df.ditag_pair_id" );
$sth->execute($ditag_type);
my $result = $self->_fetch($sth);
return $result;
}
=head2 fetch_all_by_Slice
Arg [1] : Bio::EnsEMBL::Slice
Arg [2]    : (optional) ditag type name (specific library) or an array ref with multiple type names
Arg [3] : (optional) analysis logic_name
Example : $tags = $ditagfeature_adaptor->fetch_all_by_Slice($slice, "SME005");
Description: Retrieves ditagFeatures from the database overlapping a specific region
and (optionally) of a specific ditag type or analysis.
Start & end locations are returned in slice coordinates.
Returntype : listref of Bio::EnsEMBL::Map::DitagFeatures
Caller : general
Status : At Risk
=cut
sub fetch_all_by_Slice {
my ($self, $slice, $tagtype, $logic_name) = @_;
my @result;
if(!ref($slice) || !$slice->isa("Bio::EnsEMBL::Slice")) {
throw("Bio::EnsEMBL::Slice argument expected not $slice.");
}
#get affected ditag_feature_ids
my $sql = "SELECT df.ditag_feature_id, df.ditag_id, df.seq_region_id, df.seq_region_start,
df.seq_region_end, df.seq_region_strand, df.analysis_id, df.hit_start, df.hit_end,
df.hit_strand, df.cigar_line, df.ditag_side, df.ditag_pair_id,
d.tag_count
FROM ditag_feature df, ditag d
WHERE df.ditag_id=d.ditag_id";
if($tagtype){
my $tagtypes = '';
#check if array
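#an array ref is turned into a quoted IN list, e.g. ['SME005', 'GIS_PET']
#becomes: AND d.type IN("SME005", "GIS_PET")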
if(ref $tagtype eq 'ARRAY'){
my @arraytype_mod;
foreach my $arraytype (@$tagtype){ push @arraytype_mod, '"'.$arraytype.'"' }
$tagtypes = join(", ", @arraytype_mod);
}
else{
$tagtypes = '"'.$tagtype.'"';
}
$sql .= " AND d.type IN(".$tagtypes.")";
}
if($logic_name){
my $analysis = $self->db->get_AnalysisAdaptor->fetch_by_logic_name($logic_name);
if(!$analysis) {
return undef;
}
$sql .= " AND df.analysis_id = ".$analysis->dbID();
}
$sql .= " AND df.seq_region_id = ".$slice->get_seq_region_id.
" AND df.seq_region_start <= ".$slice->end.
" AND df.seq_region_end >= ".$slice->start;
my $sth = $self->prepare($sql);
$sth->execute();
my $result = $self->_fetch($sth, $slice);
push(@result, @$result);
return \@result;
}
=head2 fetch_pairs_by_Slice
Arg [1] : Bio::EnsEMBL::Slice
Arg [2] : (optional) ditag type (specific library)
Arg [3] : (optional) analysis logic_name
Example : my $ditagfeatures = $dfa->fetch_pairs_by_Slice($slice);
foreach my $ditagfeature (@$ditagfeatures){
$minstart   = $$ditagfeature{'start'};
$maxend     = $$ditagfeature{'end'};
$bothstrand = $$ditagfeature{'strand'};
$tag_count  = $$ditagfeature{'tag_count'};
print "$minstart, $maxend, $bothstrand, $tag_count\n";
}
Description: Retrieves ditagFeature information in pairs from the database overlapping a specific region
and (optionally) of a specific ditag type or analysis. The absolute start and end points are
fetched.
Slices should be SMALL!
Returntype : array ref of hash refs representing artificial DitagFeature-like records
Caller : general
Status : At Risk
=cut
sub fetch_pairs_by_Slice {
my ($self, $slice, $tagtype, $logic_name) = @_;
my ($tag_id, $pair_id, $seq_region_id, $start, $end, $strand, $analysis_id, $tag_count);
my @result;
my $sql = "SELECT df.ditag_id, df.ditag_pair_id, df.seq_region_id, MIN(df.seq_region_start), ".
"MAX(df.seq_region_end), df.seq_region_strand, df.analysis_id, d.tag_count ".
"FROM ditag_feature df, ditag d ".
"WHERE df.ditag_id=d.ditag_id ";
if($tagtype){
$sql .= "AND d.type = \"".$tagtype."\"";
}
$sql .= " AND df.seq_region_id = ".$slice->get_seq_region_id.
" AND df.seq_region_start <= ".$slice->end.
" AND df.seq_region_end >= ".$slice->start;
if($logic_name){
my $analysis = $self->db->get_AnalysisAdaptor->fetch_by_logic_name($logic_name);
if(!$analysis) {
return undef;
}
$sql .= " AND df.analysis_id = ".$analysis->dbID();
}
$sql .= " GROUP BY df.ditag_id, df.ditag_pair_id;";
my $sth = $self->prepare($sql);
$sth->execute();
$sth->bind_columns( \$tag_id, \$pair_id, \$seq_region_id, \$start, \$end, \$strand, \$analysis_id ,\$tag_count);
while ( $sth->fetch ) {
# convert into relative slice coordinates
my $seq_region_len = $slice->seq_region_length();
if ($slice->strand == 1) { # Positive strand
$start = $start - $slice->start + 1;
$end = $end - $slice->start + 1;
if ($slice->is_circular()) { # Handle circular chromosomes
if ($start > $end) { # Looking at a feature overlapping the chromosome origin
if ($end > $slice->start) {
# Looking at the region in the beginning of the chromosome
$start -= $seq_region_len;
}
if ($end < 0) {
$end += $seq_region_len;
}
} else {
if ($slice->start > $slice->end && $end < 0) {
# Looking at the region overlapping the chromosome origin and
# a feature which is at the beginning of the chromosome
$start += $seq_region_len;
$end += $seq_region_len;
}
}
} # end if ($dest_slice->is_circular...)
} else { # Negative strand
my ($seq_region_start, $seq_region_end) = ($start, $end);
$start = $slice->end - $seq_region_end + 1;
$end = $slice->end - $seq_region_start + 1;
if ($slice->is_circular()) {
if ($slice->start > $slice->end) { # slice spans origin or replication
if ($seq_region_start >= $slice->start) {
$end += $seq_region_len;
$start += $seq_region_len
if $seq_region_end > $slice->start;
} elsif ($seq_region_start <= $slice->end) {
# do nothing
} elsif ($seq_region_end >= $slice->start) {
$start += $seq_region_len;
$end += $seq_region_len;
} elsif ($seq_region_end <= $slice->end) {
$end += $seq_region_len
if $end < 0;
} elsif ($seq_region_start > $seq_region_end) {
$end += $seq_region_len;
} else { }
} else {
if ($seq_region_start <= $slice->end and $seq_region_end >= $slice->start) {
# do nothing
} elsif ($seq_region_start > $seq_region_end) {
if ($seq_region_start <= $slice->end) {
$start -= $seq_region_len;
} elsif ($seq_region_end >= $slice->start) {
$end += $seq_region_len;
} else { }
}
}
}
$strand *= -1;
}
my %ditag_feature_pair = (
ditag => $tag_id,
pair_id => $pair_id,
region => $seq_region_id,
start => $start,
end => $end,
strand => $strand,
analysis => $analysis_id,
tag_count => $tag_count
);
push(@result, \%ditag_feature_pair);
}
return \@result;
}
=head2 _fetch
Arg [1] : statement handler
Arg [2] : (optional) target-slice for the feature
Description: generic sql-fetch function for the DitagFeature fetch methods
Returntype : listref of Bio::EnsEMBL::Map::DitagFeatures
Caller : private
Status : At Risk
=cut
sub _fetch {
my ($self, $sth, $dest_slice) = @_;
my ( $tag_id, $mothertag_id, $seqreg, $seqstart, $seqend, $strand, $analysis_id, $hit_start,
$hit_end, $hit_strand, $cigar_line, $ditag_side, $ditag_pair_id, $tag_count );
$sth->bind_columns( \$tag_id, \$mothertag_id, \$seqreg,
\$seqstart, \$seqend, \$strand,
\$analysis_id, \$hit_start, \$hit_end,
\$hit_strand, \$cigar_line, \$ditag_side,
\$ditag_pair_id, \$tag_count );
my @ditag_features;
my $dest_slice_start;
my $dest_slice_end;
my $dest_slice_strand;
if($dest_slice) {
$dest_slice_start = $dest_slice->start();
$dest_slice_end = $dest_slice->end();
$dest_slice_strand = $dest_slice->strand();
}
while ( $sth->fetch ) {
my $analysis_obj = $self->db->get_AnalysisAdaptor->fetch_by_dbID($analysis_id);
my $slice = $self->db->get_SliceAdaptor->fetch_by_seq_region_id($seqreg);
if($dest_slice) {
# convert into destination-slice coordinates; circular chromosomes
# and slice strand are handled below
my $seq_region_len = $dest_slice->seq_region_length();
if ($dest_slice_strand == 1) { # Positive strand
$seqstart = $seqstart - $dest_slice_start + 1;
$seqend = $seqend - $dest_slice_start + 1;
if ($dest_slice->is_circular()) { # Handle circular chromosomes
if ($seqstart > $seqend) { # Looking at a feature overlapping the chromosome origin
if ($seqend > $dest_slice_start) {
# Looking at the region in the beginning of the chromosome.
$seqstart -= $seq_region_len;
}
if ($seqend < 0) {
$seqend += $seq_region_len;
}
} else {
if ($dest_slice_start > $dest_slice_end && $seqend < 0) {
# Looking at the region overlapping the chromosome origin and
# a feature which is at the beginning of the chromosome.
$seqstart += $seq_region_len;
$seqend += $seq_region_len;
}
}
}
} else { # Negative strand
my $start = $dest_slice_end - $seqend + 1;
my $end = $dest_slice_end - $seqstart + 1;
if ($dest_slice->is_circular()) {
if ($dest_slice_start > $dest_slice_end) {
# slice spans origin or replication
if ($seqstart >= $dest_slice_start) {
$end += $seq_region_len;
$start += $seq_region_len
if $seqend > $dest_slice_start;
} elsif ($seqstart <= $dest_slice_end) {
# do nothing
} elsif ($seqend >= $dest_slice_start) {
$start += $seq_region_len;
$end += $seq_region_len;
} elsif ($seqend <= $dest_slice_end) {
$end += $seq_region_len
if $end < 0;
} elsif ($seqstart > $seqend) {
$end += $seq_region_len;
} else { }
} else {
if ($seqstart <= $dest_slice_end and $seqend >= $dest_slice_start) {
# do nothing
} elsif ($seqstart > $seqend) {
if ($seqstart <= $dest_slice_end) {
$start -= $seq_region_len;
} elsif ($seqend >= $dest_slice_start) {
$end += $seq_region_len;
} else { }
}
}
}
$seqstart = $start;
$seqend = $end;
$strand *= -1;
}
$slice = $dest_slice;
}
push @ditag_features,
Bio::EnsEMBL::Map::DitagFeature->new( -dbid => $tag_id,
-slice => $slice,
-start => $seqstart,
-end => $seqend,
-strand => $strand,
-analysis => $analysis_obj,
-hit_start => $hit_start,
-hit_end => $hit_end,
-hit_strand => $hit_strand,
-ditag_id => $mothertag_id,
-cigar_line => $cigar_line,
-ditag_side => $ditag_side,
-ditag_pair_id => $ditag_pair_id,
-ditag => undef,
-tag_count => $tag_count,
-adaptor => $self,
);
}
return \@ditag_features;
}
=head2 sequence
Arg [1] : dbID of DitagFeature
Example : $ditagfeature_adaptor->get_sequence($ditagFeature->dbID)
Description: get the part of the sequence of a ditag
that is actually aligned to the genome.
Returntype : string
Exceptions : thrown if not all data needed for storing is populated in the
ditag features
Caller : Bio::EnsEMBL::Map::DitagFeature
Status : At Risk
=cut
sub sequence {
my ($self, $dbID) = @_;
my $sequence = undef;
my $db = $self->db() or throw "Couldn't get database connection.";
my $sql = "SELECT d.sequence, df.hit_start, df.hit_end, df.hit_strand ".
"FROM ditag d, ditag_feature df ".
"WHERE df.ditag_id=d.ditag_id and df.ditag_feature_id = ?";
my $sth = $db->dbc->prepare($sql);
$sth->execute( $dbID );
my ($seq, $start, $end, $strand) = $sth->fetchrow_array();
if($seq and $start and $end and $strand){
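# hit_start/hit_end are 1-based positions of the aligned block within
# the ditag's own sequence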
$sequence = substr($seq, ($start-1), ($end-$start+1));
if($strand == -1) {
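# IUPAC-aware base complement for minus-strand hits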
$sequence =~ tr/acgtrymkswhbvdnxACGTRYMKSWHBVDNX/tgcayrkmswdvbhnxTGCAYRKMSWDVBHNX/;
}
}
return $sequence;
}
=head2 store
Arg [1] : (Array ref of) Bio::EnsEMBL::Map::DitagFeature
Example : $ditagfeature_adaptor->store(@ditag_features);
Description: Stores a single ditagFeature or
a list of ditagFeatures in this database.
Returntype : none
Exceptions : thrown if not all data needed for storing is populated in the
ditag features
Caller : general
Status : At Risk
=cut
sub store {
my ( $self, $ditag_features ) = @_;
if ( ref $ditag_features eq 'ARRAY' ) {
if ( scalar(@$ditag_features) == 0 ) {
throw( "Must call store with ditag_feature or list ref of ditags_features" );
}
} elsif ($ditag_features) {
my @ditag_features;
push @ditag_features, $ditag_features;
$ditag_features = \@ditag_features;
} else {
throw( "Must call store with ditag_feature or list ref of ditag_features." );
}
my $db = $self->db() or throw "Couldn't get database connection.";
my $sth1 = $self->prepare( "INSERT INTO ditag_feature( ditag_id, seq_region_id, seq_region_start,
seq_region_end, seq_region_strand, analysis_id, hit_start, hit_end,
hit_strand, cigar_line, ditag_side, ditag_pair_id )
VALUES( ?,?,?,?,?,?,?,?,?,?,?,? )" );
my $sth2 = $self->prepare( "INSERT INTO ditag_feature( ditag_feature_id, ditag_id, seq_region_id,
seq_region_start, seq_region_end, seq_region_strand, analysis_id, hit_start,
hit_end, hit_strand, cigar_line, ditag_side, ditag_pair_id )
VALUES( ?,?,?,?,?,?,?,?,?,?,?,?,? )" );
# my $sth3 = $self->prepare( "SELECT COUNT(*) FROM ditag_feature
# WHERE ditag_id = ?" );
TAG:
foreach my $ditag_feature (@$ditag_features) {
if ( !ref $ditag_feature || !$ditag_feature->isa("Bio::EnsEMBL::Map::DitagFeature") ) {
throw( "Object must be an Ensembl DitagFeature, "
. "not a " . ref($ditag_feature) );
}
if ( $ditag_feature->is_stored($db) ) {
warning( "DitagFeature " . $ditag_feature->dbID .
" is already stored in this database,".
" maybe you need to use the update() method?" );
next TAG;
}
if(!$ditag_feature->ditag_id or !($self->db->get_DitagAdaptor->fetch_by_dbID($ditag_feature->ditag_id))){
throw("DitagFeature must be supplied with the id of a corresponding Ditag object.");
}
if(!$ditag_feature->ditag or !$ditag_feature->ditag->isa("Bio::EnsEMBL::Map::Ditag")){
throw("DitagFeature must be linked to a valid Ditag object.");
}
# #check if more than x tags with this ditag id exist
# $sth3->execute( $ditag_feature->ditag_id );
# my ($num) = $sth3->fetchrow_array();
# if ( ($num) and ($num > 1) ) {
# warning( "There are already at least 2 DitagFeatures relating to Ditag ".
# $ditag->ditag_id." stored in this database." );
# if ( $num > 4 ) {
# warning( "not storing" );
# next TAG;
# }
# }
if ( $ditag_feature->dbID ) {
$sth2->bind_param( 1, $ditag_feature->dbID, SQL_INTEGER );
$sth2->bind_param( 2, $ditag_feature->ditag_id, SQL_INTEGER );
$sth2->bind_param( 3, ($ditag_feature->slice->get_seq_region_id), SQL_INTEGER );
$sth2->bind_param( 4, $ditag_feature->start, SQL_INTEGER );
$sth2->bind_param( 5, $ditag_feature->end, SQL_INTEGER );
$sth2->bind_param( 6, $ditag_feature->strand, SQL_VARCHAR );
$sth2->bind_param( 7, $ditag_feature->analysis->dbID, SQL_INTEGER );
$sth2->bind_param( 8, $ditag_feature->hit_start, SQL_INTEGER );
$sth2->bind_param( 9, $ditag_feature->hit_end, SQL_INTEGER );
$sth2->bind_param( 10, $ditag_feature->hit_strand, SQL_VARCHAR );
$sth2->bind_param( 11, $ditag_feature->cigar_line, SQL_VARCHAR );
$sth2->bind_param( 12, $ditag_feature->ditag_side, SQL_VARCHAR );
$sth2->bind_param( 13, $ditag_feature->ditag_pair_id, SQL_VARCHAR );
$sth2->execute();
}
else{
$sth1->bind_param( 1, $ditag_feature->ditag_id, SQL_INTEGER );
$sth1->bind_param( 2, ($ditag_feature->slice->get_seq_region_id), SQL_INTEGER );
$sth1->bind_param( 3, $ditag_feature->start, SQL_INTEGER );
$sth1->bind_param( 4, $ditag_feature->end, SQL_INTEGER );
$sth1->bind_param( 5, $ditag_feature->strand, SQL_VARCHAR );
$sth1->bind_param( 6, $ditag_feature->analysis->dbID, SQL_INTEGER );
$sth1->bind_param( 7, $ditag_feature->hit_start, SQL_INTEGER );
$sth1->bind_param( 8, $ditag_feature->hit_end, SQL_INTEGER );
$sth1->bind_param( 9, $ditag_feature->hit_strand, SQL_VARCHAR );
$sth1->bind_param( 10, $ditag_feature->cigar_line, SQL_VARCHAR );
$sth1->bind_param( 11, $ditag_feature->ditag_side, SQL_VARCHAR );
$sth1->bind_param( 12, $ditag_feature->ditag_pair_id, SQL_VARCHAR );
$sth1->execute();
my $dbID = $self->last_insert_id('ditag_feature_id', undef, 'ditag_feature');
$ditag_feature->dbID($dbID);
$ditag_feature->adaptor($self);
}
}
}
=head2 batch_store
Arg [1] : (Array ref of) Bio::EnsEMBL::Map::DitagFeatures
Arg [2] : bool have_dbIDs
Example : $ditagfeature_adaptor->batch_store(\@ditag_features);
Description: Stores a list of ditagFeatures in this database.
DitagFeatures are expected to have no dbID yet unless flag "have_dbIDs" is true.
They are inserted in one combined INSERT for better performance.
Returntype : none
Exceptions : thrown if not all data needed for storing is given for the
ditag features
Caller : general
Status : At Risk
=cut
sub batch_store {
my ( $self, $ditag_features, $have_dbIDs ) = @_;
my @good_ditag_features;
my ($sql, $sqladd);
my $inserts = 0;
if ( ref $ditag_features eq 'ARRAY' ) {
if ( scalar(@$ditag_features) == 0 ) {
throw( "Must call store with ditag_feature or list ref of ditag_features." );
}
} elsif ($ditag_features) {
my @ditag_features;
push @ditag_features, $ditag_features;
$ditag_features = \@ditag_features;
} else {
throw( "Must call store with ditag_feature or list ref of ditag_features." );
}
my $db = $self->db() or throw "Couldn't get database connection.";
#check whether it's a DitagFeature object and is not stored already
foreach my $ditag_feature (@$ditag_features) {
if ( !ref $ditag_feature || !$ditag_feature->isa("Bio::EnsEMBL::Map::DitagFeature") ) {
throw( "Object must be an Ensembl DitagFeature, "
. "not a " . ref($ditag_feature) );
}
if(!$ditag_feature->ditag_id or !($self->db->get_DitagAdaptor->fetch_by_dbID($ditag_feature->ditag_id))){
throw("DitagFeature must be supplied with the id of a corresponding Ditag object.");
}
if(!$ditag_feature->ditag or !$ditag_feature->ditag->isa("Bio::EnsEMBL::Map::Ditag")){
throw("DitagFeature must be linked to a valid Ditag object.");
}
if ( $ditag_feature->is_stored($db) ) {
warning( "DitagFeature " . $ditag_feature->dbID
. " is already stored in this database." );
next;
}
push(@good_ditag_features, $ditag_feature);
}
$ditag_features = undef;
#create batch INSERT
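#the generated statement has the shape:
#  INSERT INTO ditag_feature (...) VALUES (...), (...), ... with one tuple per feature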
if($have_dbIDs){
$sql = "INSERT INTO ditag_feature ( ditag_feature_id, ditag_id, seq_region_id, seq_region_start, ".
"seq_region_end, seq_region_strand, analysis_id, hit_start, hit_end, ".
"hit_strand, cigar_line, ditag_side, ditag_pair_id ) VALUES ";
foreach my $ditag_feature (@good_ditag_features) {
$sqladd = "";
if($inserts){ $sqladd = ", " }
$sqladd .= "(". $ditag_feature->ditag_feature_id.", ".$ditag_feature->ditag_id.", ".
($ditag_feature->slice->get_seq_region_id).", ". $ditag_feature->start.", ".
$ditag_feature->end.", '".$ditag_feature->strand."', ".$ditag_feature->analysis->dbID.", ".
$ditag_feature->hit_start.", ".$ditag_feature->hit_end.", '".$ditag_feature->hit_strand.
"', '".$ditag_feature->cigar_line."', '".$ditag_feature->ditag_side."', ".
$ditag_feature->ditag_pair_id.")";
$sql .= $sqladd;
$inserts++;
}
}
else{
$sql = "INSERT INTO ditag_feature ( ditag_id, seq_region_id, seq_region_start, ".
"seq_region_end, seq_region_strand, analysis_id, hit_start, hit_end, ".
"hit_strand, cigar_line, ditag_side, ditag_pair_id ) VALUES ";
foreach my $ditag_feature (@good_ditag_features) {
$sqladd = "";
if($inserts){ $sqladd = ", " }
$sqladd .= "(". $ditag_feature->ditag_id.", ".($ditag_feature->slice->get_seq_region_id).", ".
$ditag_feature->start.", ".$ditag_feature->end.", '".$ditag_feature->strand."', ".
$ditag_feature->analysis->dbID.", ".$ditag_feature->hit_start.", ".$ditag_feature->hit_end.
", '".$ditag_feature->hit_strand."', '".$ditag_feature->cigar_line."', '".
$ditag_feature->ditag_side."', ".$ditag_feature->ditag_pair_id.")";
$sql .= $sqladd;
$inserts++;
}
}
#STORE
if($inserts){
print STDERR "\nHave $inserts Features.\n";
eval{
$db->dbc->do($sql);
};
if($@){
warning("Problem inserting ditag feature batch!".$@."\n");
}
}
else{
warn "Nothing stored!";
}
}
=head2 update
Arg [1] : ditagFeature to update
Description: update an existing ditagFeature with new values
Returntype : 1 on success
Status : At Risk
=cut
sub update {
my ($self, $ditagFeature) = @_;
my $sth = $self->prepare( "UPDATE ditag_feature
SET ditag_id=?, seq_region_id=?, seq_region_start=?, seq_region_end=?,
seq_region_strand=?, analysis_id=?, hit_start=?, hit_end=?, hit_strand=?,
cigar_line=?, ditag_side=?, ditag_pair_id=?
where ditag_feature_id=?;" );
$sth->bind_param(1, $ditagFeature->ditag_id, SQL_INTEGER);
$sth->bind_param(1, $ditagFeature->seq_region_id, SQL_INTEGER);
$sth->bind_param(1, $ditagFeature->seq_region_start, SQL_INTEGER);
$sth->bind_param(1, $ditagFeature->seq_region_end, SQL_INTEGER);
$sth->bind_param(1, $ditagFeature->seq_region_strand, SQL_TINYINT);
$sth->bind_param(1, $ditagFeature->hit_start, SQL_INTEGER);
$sth->bind_param(1, $ditagFeature->hit_end, SQL_INTEGER);
$sth->bind_param(1, $ditagFeature->hit_strand, SQL_TINYINT);
$sth->bind_param(1, $ditagFeature->cigar_line, SQL_LONGVARCHAR);
$sth->bind_param(1, $ditagFeature->ditag_side, SQL_VARCHAR);
$sth->bind_param(1, $ditagFeature->ditag_pair_id, SQL_INTEGER);
$sth->bind_param(1, $ditagFeature->dbID, SQL_INTEGER);
my $result =$sth->execute();
return $result;
}
=head2 list_dbIDs
Args : None
Example : my @feature_ids = @{$dfa->list_dbIDs()};
Description: Gets an array of internal IDs for all DitagFeature objects in
the current database.
Arg[1] : <optional> int. not 0 for the ids to be sorted by the seq_region.
Returntype : List of ints
Exceptions : None
Status : Stable
=cut
sub list_dbIDs {
my ($self, $ordered) = @_;
return $self->_list_dbIDs('ditag_feature', undef, $ordered);
}
1;
| 35.818393 | 114 | 0.597926 |
ed3e36238bbd289265feb3d3ca5e35c24936e1f9
| 1,849 |
pm
|
Perl
|
auto-lib/Paws/KinesisAnalytics/KinesisFirehoseOutputUpdate.pm
|
meis/aws-sdk-perl
|
6d61ffcf351e446f06d7e84e53caa08d98573959
|
[
"Apache-2.0"
] | null | null | null |
auto-lib/Paws/KinesisAnalytics/KinesisFirehoseOutputUpdate.pm
|
meis/aws-sdk-perl
|
6d61ffcf351e446f06d7e84e53caa08d98573959
|
[
"Apache-2.0"
] | null | null | null |
auto-lib/Paws/KinesisAnalytics/KinesisFirehoseOutputUpdate.pm
|
meis/aws-sdk-perl
|
6d61ffcf351e446f06d7e84e53caa08d98573959
|
[
"Apache-2.0"
] | null | null | null |
package Paws::KinesisAnalytics::KinesisFirehoseOutputUpdate;
use Moose;
has ResourceARNUpdate => (is => 'ro', isa => 'Str');
has RoleARNUpdate => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::KinesisAnalytics::KinesisFirehoseOutputUpdate
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::KinesisAnalytics::KinesisFirehoseOutputUpdate object:
$service_obj->Method(Att1 => { ResourceARNUpdate => $value, ..., RoleARNUpdate => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::KinesisAnalytics::KinesisFirehoseOutputUpdate object:
$result = $service_obj->Method(...);
$result->Att1->ResourceARNUpdate
=head1 DESCRIPTION
When updating an output configuration using the UpdateApplication
operation, provides information about an Amazon Kinesis Firehose
delivery stream configured as the destination.
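For example, as part of an C<UpdateApplication> call (sketch; both ARNs are placeholders):
  KinesisFirehoseOutputUpdate => {
    ResourceARNUpdate => 'arn:aws:firehose:us-east-1:123456789012:deliverystream/example-stream',
    RoleARNUpdate     => 'arn:aws:iam::123456789012:role/example-role',
  }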
=head1 ATTRIBUTES
=head2 ResourceARNUpdate => Str
Amazon Resource Name (ARN) of the Amazon Kinesis Firehose delivery
stream to write to.
=head2 RoleARNUpdate => Str
ARN of the IAM role that Amazon Kinesis Analytics can assume to access
the stream on your behalf. You need to grant the necessary permissions
to this role.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::KinesisAnalytics>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 27.191176 | 122 | 0.762574 |
ed3a06f3e4485f2726fe1399588bf1af11352fa9
| 560 |
al
|
Perl
|
Apps/IN/INVoucherInterface/app/src/pageextension/BankReceiptVoucher.PageExt.al
|
MiguelMercadoActual/ALAppExtensions
|
97ee3823053eb32fa7e38dc3d1e7a89bdcca8d7b
|
[
"MIT"
] | 337 |
2019-05-07T06:04:40.000Z
|
2022-03-31T10:07:42.000Z
|
Apps/IN/INVoucherInterface/app/src/pageextension/BankReceiptVoucher.PageExt.al
|
MiguelMercadoActual/ALAppExtensions
|
97ee3823053eb32fa7e38dc3d1e7a89bdcca8d7b
|
[
"MIT"
] | 14,850 |
2019-05-07T06:04:27.000Z
|
2022-03-31T19:53:28.000Z
|
Apps/IN/INVoucherInterface/app/src/pageextension/BankReceiptVoucher.PageExt.al
|
MiguelMercadoActual/ALAppExtensions
|
97ee3823053eb32fa7e38dc3d1e7a89bdcca8d7b
|
[
"MIT"
] | 374 |
2019-05-09T10:08:14.000Z
|
2022-03-31T17:48:32.000Z
|
pageextension 18942 "Bank Receipt Voucher" extends "Bank Receipt Voucher"
{
layout
{
addafter("Bal. Account No.")
{
field("Cheque No."; "Cheque No.")
{
ApplicationArea = Basic, Suite;
ToolTip = 'Specifies the cheque number of the journal entry.';
}
field("Cheque Date"; "Cheque Date")
{
ApplicationArea = Basic, Suite;
ToolTip = 'Specifies the cheque date of the journal entry.';
}
}
}
}
| 29.473684 | 78 | 0.494643 |
ed792e96ee6ba79ef1332d435d223091c7c17e92
| 2,117 |
pm
|
Perl
|
third-party/webscalesqlclient/mysql-5.6/xtrabackup/test/kewpie/randgen/lib/GenTest/Transform/Count.pm
|
hkirsman/hhvm_centos7_builds
|
2a1fd6de0d2d289c1575f43f10018f3bec23bb13
|
[
"PHP-3.01",
"Zend-2.0"
] | 2 |
2018-03-07T08:31:29.000Z
|
2019-02-01T10:10:48.000Z
|
third-party/webscalesqlclient/mysql-5.6/xtrabackup/test/kewpie/randgen/lib/GenTest/Transform/Count.pm
|
hkirsman/hhvm_centos7_builds
|
2a1fd6de0d2d289c1575f43f10018f3bec23bb13
|
[
"PHP-3.01",
"Zend-2.0"
] | 1 |
2021-02-23T14:52:22.000Z
|
2021-02-23T14:52:22.000Z
|
randgen/lib/GenTest/Transform/Count.pm
|
isabella232/kewpie
|
47d67124fa755719eda3ca5a621a2abf0322d3f9
|
[
"Apache-2.0"
] | 1 |
2018-03-15T05:21:10.000Z
|
2018-03-15T05:21:10.000Z
|
# Copyright (c) 2008, 2011 Oracle and/or its affiliates. All rights reserved.
# Use is subject to license terms.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
package GenTest::Transform::Count;
require Exporter;
@ISA = qw(GenTest GenTest::Transform);
use strict;
use lib 'lib';
use GenTest;
use GenTest::Transform;
use GenTest::Constants;
#
# This Transform provides the following transformations
#
# SELECT COUNT(*) FROM ... -> SELECT * FROM ...
#
# SELECT ... FROM ... -> SELECT COUNT(*), ... FROM ...
#
# It avoids GROUP BY and aggregate functions because
# those are difficult to validate with a simple check such as
# TRANSFORM_OUTCOME_COUNT
#
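# For illustration (hypothetical queries, not taken from any test suite),
# the second rule above would rewrite
#   SELECT col1 FROM t1
# into
#   SELECT COUNT(*) , col1 FROM t1 /* TRANSFORM_OUTCOME_COUNT */
# so the validator presumably only has to compare the reported count
# against the number of rows actually returned.
#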
sub transform {
my ($class, $orig_query) = @_;
return STATUS_WONT_HANDLE if $orig_query =~ m{GROUP\s+BY|LIMIT|HAVING}sio;
my ($select_list) = $orig_query =~ m{SELECT (.*?) FROM}sio;
if ($select_list =~ m{AVG|BIT|CONCAT|DISTINCT|GROUP|MAX|MIN|STD|SUM|VAR|STRAIGHT_JOIN|SQL_SMALL_RESULT}sio) {
return STATUS_WONT_HANDLE;
} elsif ($select_list =~ m{SELECT\s?\*}sio) {
# "SELECT *" was matched. Cannot have both * and COUNT(...) in SELECT list.
$orig_query =~ s{SELECT (.*?) FROM}{SELECT COUNT(*) FROM}sio;
} elsif ($select_list !~ m{COUNT}sio) {
$orig_query =~ s{SELECT (.*?) FROM}{SELECT COUNT(*) , $1 FROM}sio;
} elsif ($select_list =~ m{^\s*COUNT\(\s*\*\s*\)}sio) {
$orig_query =~ s{SELECT .*? FROM}{SELECT * FROM}sio;
} else {
return STATUS_WONT_HANDLE;
}
return $orig_query." /* TRANSFORM_OUTCOME_COUNT */";
}
1;
| 32.075758 | 110 | 0.705716 |
ed57737c7273b1403a1e3965931fa3986b2d789e
| 13,460 |
pm
|
Perl
|
tests/installation/bootloader_svirt.pm
|
foursixnine/os-autoinst-distri-opensuse
|
b745948a5fb979f74c5e88b38e768b0387445bb1
|
[
"FSFAP"
] | 1 |
2015-12-22T20:23:22.000Z
|
2015-12-22T20:23:22.000Z
|
tests/installation/bootloader_svirt.pm
|
foursixnine/os-autoinst-distri-opensuse
|
b745948a5fb979f74c5e88b38e768b0387445bb1
|
[
"FSFAP"
] | null | null | null |
tests/installation/bootloader_svirt.pm
|
foursixnine/os-autoinst-distri-opensuse
|
b745948a5fb979f74c5e88b38e768b0387445bb1
|
[
"FSFAP"
] | 2 |
2015-02-27T07:34:58.000Z
|
2015-05-15T09:24:55.000Z
|
# SUSE's openQA tests
#
# Copyright 2016-2019 SUSE LLC
# SPDX-License-Identifier: FSFAP
# Summary: svirt bootloader
# Maintainer: Michal Nowak <[email protected]>
package bootloader_svirt;
use base "installbasetest";
use strict;
use warnings;
use testapi;
use utils;
use version_utils qw(is_jeos is_microos is_installcheck is_rescuesystem is_sle is_vmware);
use registration 'registration_bootloader_cmdline';
use data_integrity_utils 'verify_checksum';
use File::Basename;
use network_utils qw(genmac);
sub vmware_set_permanent_boot_device {
return unless is_vmware;
my ($boot_device) = @_;
assert_screen('vmware_bios_frontpage');
    # No boot device was requested; press enter to go on with the default
    do { send_key 'ret'; return } unless $boot_device;
# Enter menu with available boot devices
send_key 'f2';
send_key_until_needlematch('vmware_bios_boot_tab', 'right', 10, 2);
send_key_until_needlematch("vmware_bios_boot_${boot_device}", 'down', 5);
send_key_until_needlematch("vmware_bios_boot_top_${boot_device}", '+', 5);
send_key 'f10';
assert_screen('vmware_bios_boot_confirm');
send_key 'ret';
assert_screen('vmware_bios_frontpage');
send_key 'ret';
}
sub search_image_on_svirt_host {
my ($svirt, $file, $dir) = @_;
my $basename = basename($file);
my $domain = check_var('VIRSH_VMM_FAMILY', 'vmware') ? 'sshVMwareServer' : undef;
    # Use only commands that exist on all platforms
    # (e.g. Linux, VMware ESXi); `tr', for instance, is not available on VMware ESXi.
my $path = $svirt->get_cmd_output("find $dir -name $basename | head -n1 | awk 1 ORS=''", {domain => $domain});
die "Unable to find image $basename in $dir" unless $path;
diag("Image found: $path");
enter_cmd("# Copying image $basename...");
return $path;
}
sub run {
my $arch = get_var('ARCH');
my $vmm_family = get_required_var('VIRSH_VMM_FAMILY');
my $vmm_type = get_required_var('VIRSH_VMM_TYPE');
my $svirt = select_console('svirt');
my $name = $svirt->name;
my $repo;
my $vmware_openqa_datastore;
# Clear datastore on VMware host
if (check_var('VIRSH_VMM_FAMILY', 'vmware')) {
$vmware_openqa_datastore = "/vmfs/volumes/" . get_required_var('VMWARE_DATASTORE') . "/openQA/";
$svirt->get_cmd_output("set -x; rm -f ${vmware_openqa_datastore}*${name}*", {domain => 'sshVMwareServer'});
}
    # Workaround until the svirt fix (https://github.com/os-autoinst/os-autoinst/pull/901) is deployed
my $n = get_var('NUMDISKS', 1);
set_var('NUMDISKS', defined get_var('RAIDLEVEL') ? 4 : $n);
my $xenconsole = "hvc0";
if (!get_var('SP2ORLATER')) {
$xenconsole = "xvc0";
}
set_var('BOOTFROM', 'c') if get_var('BOOT_HDD_IMAGE');
my $boot_device = '';
if (check_var('BOOTFROM', 'c')) {
$boot_device = 'hd';
}
elsif (check_var('BOOTFROM', 'd') || get_var('ISO')) {
$boot_device = 'cdrom';
} else {
record_info("No boot medium", "Failed to select a bootable medium, please check ISO,"
. "BOOT_FROM and BOOT_HDD_IMAGE settings",
result => 'fail'
);
}
    # Makes no difference on VMware; for boot device selection there,
    # see vmware_set_permanent_boot_device().
$svirt->change_domain_element(os => boot => {dev => $boot_device}) unless is_vmware;
# Unless os-autoinst PR#956 is deployed we have to remove 'on_reboot' first
# This has no effect on VMware ('restart' is kept).
$svirt->change_domain_element(on_reboot => undef);
$svirt->change_domain_element(on_reboot => 'destroy');
    # This needs to be set by the user per environment on VMware (e.g. to '/vmfs/volumes')
get_required_var('VIRSH_OPENQA_BASEDIR') if check_var('VIRSH_VMM_FAMILY', 'vmware');
my $dev_id = 'a';
my $basedir = svirt_host_basedir();
# This part of the path-to-image is missing on VMware
my $share_factory = check_var('VIRSH_VMM_FAMILY', 'vmware') ? '' : 'share/factory/';
my $isodir = "$basedir/openqa/${share_factory}iso/ $basedir/openqa/${share_factory}iso/fixed/";
    # In netinstall we don't have ISO media; for the rest we attach it, if it's defined
if (my $isofile = get_var('ISO')) {
my $isopath = search_image_on_svirt_host($svirt, $isofile, $isodir);
$svirt->add_disk(
{
cdrom => 1,
file => $isopath,
dev_id => $dev_id
});
$dev_id = chr((ord $dev_id) + 1); # return next letter in alphabet
(undef, $isodir) = fileparse($isopath);
}
# Add addon media (if present at all)
foreach my $n (1 .. 9) {
if (my $addon_isofile = get_var("ISO_" . $n)) {
my $addon_isopath = search_image_on_svirt_host($svirt, $addon_isofile, $isodir);
$svirt->add_disk(
{
cdrom => 1,
file => $addon_isopath,
dev_id => $dev_id
});
$dev_id = chr((ord $dev_id) + 1); # return next letter in alphabet
}
}
my $hdddir = "$basedir/openqa/${share_factory}hdd $basedir/openqa/${share_factory}hdd/fixed";
my $size_i = get_var('HDDSIZEGB', '10');
foreach my $n (1 .. get_var('NUMDISKS')) {
if (my $full_hdd = get_var('HDD_' . $n)) {
my $hdd = basename($full_hdd);
my $hddpath = search_image_on_svirt_host($svirt, $hdd, $hdddir);
if ($hddpath =~ m/vmdk\.xz$/) {
my $nfs_ro = $hddpath;
$hddpath = "$vmware_openqa_datastore/$hdd" =~ s/vmdk\.xz/vmdk/r;
# do nothing if the image is already unpacked in datastore
if ($svirt->run_cmd("test -e $hddpath", domain => 'sshVMwareServer')) {
my $ret = $svirt->run_cmd("cp $nfs_ro $vmware_openqa_datastore", domain => 'sshVMwareServer');
die "Image copy to datastore failed!\n" if $ret;
$ret = $svirt->run_cmd("xz --decompress --keep --verbose $vmware_openqa_datastore/$hdd", domain => 'sshVMwareServer');
die "Image decompress in datastore failed!\n" if $ret;
}
}
$svirt->add_disk(
{
backingfile => 1,
dev_id => $dev_id,
file => $hddpath
});
}
else {
$svirt->add_disk(
{
create => 1,
dev_id => $dev_id,
size => $size_i . 'G'
});
}
$dev_id = chr((ord $dev_id) + 1); # return next letter in alphabet
}
## Verify checksum of the copied images
my $location = '/var/lib/libvirt/images/';
if (is_vmware) {
$location = get_var('BOOT_HDD_IMAGE') ? $vmware_openqa_datastore : $isodir;
}
my $errors = verify_checksum $location;
record_info("Checksum", $errors, result => 'fail') if $errors;
    # We need to use 'tablet' as a pointer device, i.e. a device
    # with an absolute axis. That needs to be explicitly configured
    # on KVM and Xen HVM only. VMware and Xen PV add a pointer
    # device with an absolute axis by default.
if (($vmm_family eq 'kvm') or ($vmm_family eq 'xen' and $vmm_type eq 'hvm')) {
$svirt->add_input({type => 'tablet', bus => 'usb'});
$svirt->add_input({type => 'keyboard', bus => 'ps2'});
}
my $console_target_type;
if ($vmm_family eq 'xen' && $vmm_type eq 'linux') {
$console_target_type = 'xen';
}
else {
$console_target_type = 'serial';
}
    # The esx driver in libvirt does not support the `virsh console' command,
    # so we export the console on our own via TCP.
my $pty_dev_type;
if ($vmm_family eq 'vmware') {
$pty_dev_type = 'tcp';
}
else {
$pty_dev_type = 'pty';
}
my $protocol_type;
my $source = 0;
if ($vmm_family eq 'vmware') {
$protocol_type = 'raw';
$source = 1;
}
$svirt->add_pty(
{
pty_dev => 'console',
pty_dev_type => $pty_dev_type,
target_type => $console_target_type,
target_port => '0',
protocol_type => $protocol_type,
source => $source
});
if (!($vmm_family eq 'xen' && $vmm_type eq 'linux')) {
$svirt->add_pty(
{
pty_dev => 'serial',
pty_dev_type => $pty_dev_type,
target_port => '0',
protocol_type => $protocol_type,
source => $source
});
}
$svirt->add_vnc({port => get_var('VIRSH_INSTANCE', 1) + 5900});
my %ifacecfg = ();
    # VMs should be configured with a network interface that is known to work.
# Xen PV and Hyper-V use streams.
my $iface_model;
if ($vmm_family eq 'kvm') {
$iface_model = 'virtio';
}
elsif ($vmm_family eq 'xen') {
$ifacecfg{type} = 'bridge';
$ifacecfg{source} = {bridge => 'br0'};
$ifacecfg{virtualport} = {type => 'openvswitch'};
$ifacecfg{mac} = {address => genmac('00:16:3e')};
$iface_model = 'netfront';
}
elsif ($vmm_family eq 'vmware') {
$iface_model = 'e1000';
} else {
die "Unsupported value of *VIRSH_VMM_FAMILY*\n";
}
if ($iface_model) {
$ifacecfg{model} = {type => $iface_model};
}
if ($vmm_family eq 'vmware') {
        # `virsh iface-list' won't produce the correct bridge name for VMware.
        # It should be provided by the worker, or we fall back to the default.
$ifacecfg{type} = 'bridge';
$ifacecfg{source} = {bridge => get_var('VMWARE_BRIDGE', 'VM Network')};
}
elsif ($vmm_family eq 'kvm') {
$ifacecfg{type} = 'user';
# This is the default MAC address for user mode networking; same in qemu backend
$ifacecfg{mac} = {address => '52:54:00:12:34:56'};
}
else {
        # We can use a bridge or a network as the base for the network interface. A network
        # named 'default' happens to be omnipresent on workstations, bridges (br0, ...) on
        # servers. If both the 'default' network and a bridge are defined and active, the
        # bridge should be preferred, as the 'default' network does not work.
if (my $bridges = $svirt->get_cmd_output("virsh iface-list --all | grep -w active | awk '{ print \$1 }' | tail -n1 | tr -d '\\n'")) {
$ifacecfg{type} = 'bridge';
$ifacecfg{source} = {bridge => $bridges};
}
elsif (my $networks = $svirt->get_cmd_output("virsh net-list --all | grep -w active | awk '{ print \$1 }' | tail -n1 | tr -d '\\n'")) {
$ifacecfg{type} = 'network';
$ifacecfg{source} = {network => $networks};
}
}
$svirt->add_interface(\%ifacecfg);
$svirt->define_and_start;
    # A variable set only in a console (here the sshVirtsh console) does not propagate
    # to the test environment correctly and can be destroyed by bmwqemu::load_vars(),
# e.g. via set_var('...', ..., reload_needles => 1).
set_var('VMWARE_REMOTE_VMM', $svirt->get_remote_vmm) if is_vmware;
    # This sets a kernel argument so that needle-matching works on Xen PV. It is
    # done via the host's PTY device because we don't see anything unless the
    # kernel sets a framebuffer (a GRUB2 limitation, bsc#961638).
if ($vmm_family eq 'xen' and $vmm_type eq 'linux') {
$svirt->suspend;
my $cmdline = '';
$cmdline .= 'textmode=1 ' if check_var('VIDEOMODE', 'text');
$cmdline .= 'rescue=1 ' if is_installcheck || is_rescuesystem;
$cmdline .= get_var('EXTRABOOTPARAMS') . ' ' if get_var('EXTRABOOTPARAMS');
$cmdline .= registration_bootloader_cmdline . ' ' if check_var('SCC_REGISTER', 'installation');
enter_cmd "export pty=`virsh dumpxml $name | grep \"console type=\" | sed \"s/'/ /g\" | awk '{ print \$5 }'`";
enter_cmd "echo \$pty";
$svirt->resume;
wait_serial("Press enter to boot the selected OS", 10) || die "Can't get to GRUB";
# Do not boot OS from disk, select installation medium
if (!get_var('BOOT_HDD_IMAGE') && get_var('ISO') && get_var('HDD_1') && !is_jeos && !is_microos) {
enter_cmd "echo -en '\\033[B' > \$pty"; # key down
}
enter_cmd "echo e > \$pty"; # edit
my $max = (!is_jeos) ? 2 : (is_sle '<15-sp1') ? 4 : 13;
enter_cmd "echo -en '\\033[B' > \$pty" for (1 .. $max); # $max-times key down
enter_cmd "echo -en '\\033[K' > \$pty"; # end of line
if (is_sle '12-SP2+') {
enter_cmd "echo -en ' xen-fbfront.video=32,1024,768 xen-kbdfront.ptr_size=1024,768' > \$pty"; # set kernel framebuffer
enter_cmd "echo -en ' console=hvc console=tty' > \$pty"; # set consoles
}
else {
enter_cmd "echo -en ' xenfb.video=4,1024,768 ' > \$pty"; # set kernel framebuffer
enter_cmd "echo -en ' console=xvc console=tty ' > \$pty"; # set consoles
$cmdline .= 'linemode=0 '; # workaround for bsc#1066919
}
enter_cmd "echo -en ' $cmdline' > \$pty";
enter_cmd "echo -en '\\x18' > \$pty"; # send Ctrl-x to boot guest kernel
save_screenshot;
}
# connects to a guest VNC session
select_console('sut', await_console => 0);
vmware_set_permanent_boot_device($boot_device);
}
1;
| 40.299401 | 143 | 0.584473 |
73e7376168848b614dc66104abcad61cafe2cceb
| 4,323 |
pm
|
Perl
|
lib/MusicBrainz/Server/Edit/Medium/SetTrackLengths.pm
|
monicaq21/musicbrainz-server
|
a570d0ed9aaef0db4fe063488e5a016861f6eb10
|
[
"BSD-2-Clause"
] | 2 |
2019-03-14T05:31:35.000Z
|
2019-03-14T05:32:01.000Z
|
lib/MusicBrainz/Server/Edit/Medium/SetTrackLengths.pm
|
DavisDevasia/musicbrainz-server
|
d188dc521eb7bc20436aa2601f469f524b7b7ea1
|
[
"BSD-2-Clause"
] | 2 |
2021-05-12T00:15:55.000Z
|
2022-02-14T04:56:24.000Z
|
lib/MusicBrainz/Server/Edit/Medium/SetTrackLengths.pm
|
DavisDevasia/musicbrainz-server
|
d188dc521eb7bc20436aa2601f469f524b7b7ea1
|
[
"BSD-2-Clause"
] | null | null | null |
package MusicBrainz::Server::Edit::Medium::SetTrackLengths;
use Moose;
use namespace::autoclean;
use MooseX::Types::Moose qw( ArrayRef Int Str );
use MooseX::Types::Structured qw( Dict );
use MusicBrainz::Server::Constants qw( $EDIT_SET_TRACK_LENGTHS );
use MusicBrainz::Server::Edit::Types qw( Nullable );
use MusicBrainz::Server::Translation qw( N_l );
use aliased 'MusicBrainz::Server::Entity::CDTOC';
use aliased 'MusicBrainz::Server::Entity::Medium';
use aliased 'MusicBrainz::Server::Entity::MediumCDTOC';
use aliased 'MusicBrainz::Server::Entity::Release';
extends 'MusicBrainz::Server::Edit';
with 'MusicBrainz::Server::Edit::Release::RelatedEntities';
with 'MusicBrainz::Server::Edit::Medium';
with 'MusicBrainz::Server::Edit::Role::AlwaysAutoEdit';
sub edit_name { N_l('Set track lengths') }
sub edit_type { $EDIT_SET_TRACK_LENGTHS }
sub edit_kind { 'other' }
has '+data' => (
isa => Dict[
medium_id => Nullable[Int],
cdtoc => Dict[
id => Int,
toc => Str
],
affected_releases => ArrayRef[Dict[
id => Int,
name => Str,
]],
length => Dict[
# Old track lengths may be undef
old => ArrayRef[Nullable[Int]],
# But new tracks must be set if we have a toc
new => ArrayRef[Int],
]
]
);
sub release_ids {
my $self = shift;
return map { $_->{id} } @{ $self->data->{affected_releases} };
}
sub foreign_keys {
my $self = shift;
my $medium_id = $self->data->{medium_id};
return {
Release => {
map { $_ => [ 'ArtistCredit' ] } $self->release_ids
},
CDTOC => [ $self->data->{cdtoc}{id} ],
$medium_id ? (Medium => { $medium_id => [ 'Release ArtistCredit', 'MediumFormat' ] } ) : (),
}
}
sub build_display_data {
my ($self, $loaded) = @_;
my @mediums;
my $medium_id = $self->data->{medium_id};
if ($medium_id && $loaded->{Medium}{$medium_id}) {
@mediums = ($loaded->{Medium}{$medium_id});
# Edits that have a medium_id can't affect multiple releases.
} else {
@mediums = map {
Medium->new( release => $loaded->{Release}{ $_->{id} } //
Release->new( name => $_->{name} ) )
} @{ $self->data->{affected_releases} };
}
return {
cdtoc => $loaded->{CDTOC}{ $self->data->{cdtoc}{id} }
|| CDTOC->new_from_toc( $self->data->{cdtoc}{toc} ),
mediums => \@mediums,
length => {
map { $_ => $self->data->{length}{$_} } qw( old new )
}
}
}
sub initialize {
my ($self, %opts) = @_;
my $medium_id = $opts{medium_id}
or die 'Missing medium ID';
my $cdtoc_id = $opts{cdtoc_id}
or die 'Missing CDTOC ID';
my $medium = $self->c->model('Medium')->get_by_id($medium_id);
$self->c->model('Release')->load($medium);
$self->c->model('ArtistCredit')->load($medium->release);
$self->c->model('Track')->load_for_mediums($medium);
my $cdtoc = $self->c->model('CDTOC')->get_by_id($cdtoc_id);
MusicBrainz::Server::Edit::Exceptions::NoChanges->throw
if MediumCDTOC->new(cdtoc => $cdtoc, medium => $medium)->is_perfect_match;
$self->data({
medium_id => $medium_id,
cdtoc => {
id => $cdtoc_id,
toc => $cdtoc->toc
},
affected_releases => [ map +{
id => $_->id,
name => $_->name
        }, $medium->release ],
length => {
old => [ map { $_->length } @{ $medium->cdtoc_tracks } ],
new => [ map { $_->{length_time} } @{ $cdtoc->track_details } ],
}
})
}
sub accept {
my $self = shift;
my $medium_id = $self->data->{medium_id};
if (!$self->c->model('Medium')->get_by_id($medium_id)) {
MusicBrainz::Server::Edit::Exceptions::FailedDependency->throw(
'The medium to set track times for no longer exists. It may '.
'have been merged or removed since this edit was entered.'
);
}
$self->c->model('Medium')->set_lengths_to_cdtoc(
$medium_id, $self->data->{cdtoc}{id});
}
before restore => sub {
my ($self, $data) = @_;
delete $data->{tracklist_id};
};
__PACKAGE__->meta->make_immutable;
1;
| 29.408163 | 100 | 0.558409 |
ed04f00da77e4a18ef11c7d6bd728b0649fe12c3
| 210 |
t
|
Perl
|
t/01load.t
|
ystk/debian-libdatetime-timezone-perl
|
381730597e2354ba94d79755f72c59a61ddf450a
|
[
"Artistic-1.0-cl8"
] | null | null | null |
t/01load.t
|
ystk/debian-libdatetime-timezone-perl
|
381730597e2354ba94d79755f72c59a61ddf450a
|
[
"Artistic-1.0-cl8"
] | null | null | null |
t/01load.t
|
ystk/debian-libdatetime-timezone-perl
|
381730597e2354ba94d79755f72c59a61ddf450a
|
[
"Artistic-1.0-cl8"
] | null | null | null |
use strict;
use warnings;
use File::Spec;
use Test::More;
use lib File::Spec->catdir( File::Spec->curdir, 't' );
BEGIN { require 'check_datetime_version.pl' }
use_ok('DateTime::TimeZone');
done_testing();
| 15 | 54 | 0.7 |
ed39da1a27594a8d6510a8bdeb5aa0afe7fda515
| 1,067 |
t
|
Perl
|
t/00-compile.t
|
git-the-cpan/Catalyst-Plugin-Babelfish
|
456ff1e5785fb2a5d0f6d87ed36a9249daf20e4c
|
[
"MIT"
] | null | null | null |
t/00-compile.t
|
git-the-cpan/Catalyst-Plugin-Babelfish
|
456ff1e5785fb2a5d0f6d87ed36a9249daf20e4c
|
[
"MIT"
] | null | null | null |
t/00-compile.t
|
git-the-cpan/Catalyst-Plugin-Babelfish
|
456ff1e5785fb2a5d0f6d87ed36a9249daf20e4c
|
[
"MIT"
] | null | null | null |
use 5.006;
use strict;
use warnings;
# this test was generated with Dist::Zilla::Plugin::Test::Compile 2.041
use Test::More tests => 1 + ($ENV{AUTHOR_TESTING} ? 1 : 0);
my @module_files = (
'Catalyst/Plugin/Babelfish.pm'
);
# fake home for cpan-testers
use File::Temp;
local $ENV{HOME} = File::Temp::tempdir( CLEANUP => 1 );
my $inc_switch = -d 'blib' ? '-Mblib' : '-Ilib';
use File::Spec;
use IPC::Open3;
use IO::Handle;
open my $stdin, '<', File::Spec->devnull or die "can't open devnull: $!";
my @warnings;
for my $lib (@module_files)
{
# see L<perlfaq8/How can I capture STDERR from an external command?>
my $stderr = IO::Handle->new;
my $pid = open3($stdin, '>&STDERR', $stderr, $^X, $inc_switch, '-e', "require q[$lib]");
binmode $stderr, ':crlf' if $^O eq 'MSWin32';
my @_warnings = <$stderr>;
waitpid($pid, 0);
is($?, 0, "$lib loaded ok");
if (@_warnings)
{
warn @_warnings;
push @warnings, @_warnings;
}
}
is(scalar(@warnings), 0, 'no warnings found') if $ENV{AUTHOR_TESTING};
| 19.759259 | 92 | 0.60731 |
ed2cf2342ab30e28305fd7799c9a42a85077245c
| 874 |
t
|
Perl
|
t/VerifyResponse.t
|
boosterua/openid4perl
|
4814f6147c986b1c8d1ab5fd93dfaef3189b26d4
|
[
"Apache-2.0"
] | 1 |
2015-03-17T08:22:47.000Z
|
2015-03-17T08:22:47.000Z
|
t/VerifyResponse.t
|
tst2005googlecode/openid4perl
|
4814f6147c986b1c8d1ab5fd93dfaef3189b26d4
|
[
"Apache-2.0"
] | null | null | null |
t/VerifyResponse.t
|
tst2005googlecode/openid4perl
|
4814f6147c986b1c8d1ab5fd93dfaef3189b26d4
|
[
"Apache-2.0"
] | null | null | null |
use warnings;
use strict;
use Test::More qw( no_plan );
use English qw( -no_match_vars );
use Net::OpenID2::Message::VerifyResponse;
# test new with bad arg
eval{ Net::OpenID2::Message::VerifyResponse->new('foo') };
like( $EVAL_ERROR , qr/Invalid parameter/ , 'new(): bad arg' );
# test no-arg constructor
my $vr = Net::OpenID2::Message::VerifyResponse->new();
isa_ok( $vr , 'Net::OpenID2::Message::VerifyResponse' );
my $param_list = Net::OpenID2::Message::ParameterList->new( {} );
$vr = Net::OpenID2::Message::VerifyResponse->new( $param_list );
isa_ok( $vr , 'Net::OpenID2::Message::VerifyResponse' );
# test set/is signature_verified()
for( 0 , 1 ){
$vr->set_signature_verified( $_ );
if( $_ ){
ok( $vr->is_signature_verified , 'set/is signature_verified( 1 )' );
} else {
ok(!$vr->is_signature_verified , 'set/is signature_verified( 0 )' );
}
}
| 28.193548 | 72 | 0.671625 |
ed1bbf7d15c454120201e158bf855979ed8a0e57
| 3,069 |
pm
|
Perl
|
auto-lib/Paws/ACMPCA/CreateCertificateAuthorityAuditReport.pm
|
shogo82148/aws-sdk-perl
|
a87555a9d30dd1415235ebacd2715b2f7e5163c7
|
[
"Apache-2.0"
] | null | null | null |
auto-lib/Paws/ACMPCA/CreateCertificateAuthorityAuditReport.pm
|
shogo82148/aws-sdk-perl
|
a87555a9d30dd1415235ebacd2715b2f7e5163c7
|
[
"Apache-2.0"
] | 1 |
2021-05-26T19:13:58.000Z
|
2021-05-26T19:13:58.000Z
|
auto-lib/Paws/ACMPCA/CreateCertificateAuthorityAuditReport.pm
|
shogo82148/aws-sdk-perl
|
a87555a9d30dd1415235ebacd2715b2f7e5163c7
|
[
"Apache-2.0"
] | null | null | null |
package Paws::ACMPCA::CreateCertificateAuthorityAuditReport;
use Moose;
has AuditReportResponseFormat => (is => 'ro', isa => 'Str', required => 1);
has CertificateAuthorityArn => (is => 'ro', isa => 'Str', required => 1);
has S3BucketName => (is => 'ro', isa => 'Str', required => 1);
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'CreateCertificateAuthorityAuditReport');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::ACMPCA::CreateCertificateAuthorityAuditReportResponse');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ACMPCA::CreateCertificateAuthorityAuditReport - Arguments for method CreateCertificateAuthorityAuditReport on L<Paws::ACMPCA>
=head1 DESCRIPTION
This class represents the parameters used for calling the method CreateCertificateAuthorityAuditReport on the
L<AWS Certificate Manager Private Certificate Authority|Paws::ACMPCA> service. Use the attributes of this class
as arguments to method CreateCertificateAuthorityAuditReport.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to CreateCertificateAuthorityAuditReport.
=head1 SYNOPSIS
  my $acm_pca = Paws->service('ACMPCA');
  my $CreateCertificateAuthorityAuditReportResponse =
    $acm_pca->CreateCertificateAuthorityAuditReport(
AuditReportResponseFormat => 'JSON',
CertificateAuthorityArn => 'MyArn',
S3BucketName => 'MyString',
);
# Results:
my $AuditReportId =
$CreateCertificateAuthorityAuditReportResponse->AuditReportId;
my $S3Key = $CreateCertificateAuthorityAuditReportResponse->S3Key;
# Returns a L<Paws::ACMPCA::CreateCertificateAuthorityAuditReportResponse> object.
Values for attributes that are native types (Int, String, Float, etc.) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/acm-pca/CreateCertificateAuthorityAuditReport>
=head1 ATTRIBUTES
=head2 B<REQUIRED> AuditReportResponseFormat => Str
The format in which to create the report. This can be either B<JSON> or
B<CSV>.
Valid values are: C<"JSON">, C<"CSV">
=head2 B<REQUIRED> CertificateAuthorityArn => Str
The Amazon Resource Name (ARN) of the CA to be audited. This is of the
form:
C<arn:aws:acm-pca:I<region>:I<account>:certificate-authority/I<12345678-1234-1234-1234-123456789012>
>.
=head2 B<REQUIRED> S3BucketName => Str
The name of the S3 bucket that will contain the audit report.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method CreateCertificateAuthorityAuditReport in L<Paws::ACMPCA>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 34.483146 | 249 | 0.747801 |
ed55674c431c16ff29ef4b063faf214374d4aa6e
| 11,128 |
t
|
Perl
|
t/variant-scalar.t
|
mknob/p5-opcua-open62541
|
394891b5d18384c50a65f5bd6067c9665bad051a
|
[
"Artistic-1.0"
] | 2 |
2020-02-11T11:08:10.000Z
|
2021-07-29T08:19:09.000Z
|
t/variant-scalar.t
|
mknob/p5-opcua-open62541
|
394891b5d18384c50a65f5bd6067c9665bad051a
|
[
"Artistic-1.0"
] | 5 |
2020-02-13T12:06:51.000Z
|
2021-08-09T18:19:00.000Z
|
t/variant-scalar.t
|
mknob/p5-opcua-open62541
|
394891b5d18384c50a65f5bd6067c9665bad051a
|
[
"Artistic-1.0"
] | 3 |
2020-02-12T15:34:48.000Z
|
2020-02-25T13:11:47.000Z
|
use strict;
use warnings;
use OPCUA::Open62541 ':TYPES';
use Test::More tests => 112;
use Test::Exception;
use Test::LeakTrace;
use Test::NoWarnings;
use Test::Warn;
ok(my $variant = OPCUA::Open62541::Variant->new(), "variant new");
no_leaks_ok { $variant->setScalar(1, TYPES_SBYTE) } "scalar leak";
is($variant->getScalar(), 1, "scalar");
warning_like { $variant->setScalar(undef, TYPES_SBYTE) }
(qr/Use of uninitialized value in subroutine entry/, "value undef warn");
no_leaks_ok {
no warnings 'uninitialized';
$variant->setScalar(undef, TYPES_SBYTE);
} "value undef leak";
warning_like { $variant->setScalar(3, undef) }
(qr/Use of uninitialized value in subroutine entry/, "type undef warn");
no_leaks_ok {
no warnings 'uninitialized';
$variant->setScalar(3, undef)
} "type undef leak";
is($variant->getScalar(), 1, "type undef");
warning_like { $variant->setScalar("", TYPES_SBYTE) }
(qr/Argument "" isn't numeric in subroutine entry/, "value string warn");
no_leaks_ok {
no warnings 'numeric';
$variant->setScalar("", TYPES_SBYTE);
} "value string leak";
is($variant->getScalar(), 0, "value string");
warning_like { $variant->setScalar(2, "") }
(qr/Argument "" isn't numeric in subroutine entry/, "type string warn");
no_leaks_ok {
no warnings 'numeric';
$variant->setScalar(2, "")
} "type string leak";
is($variant->getScalar(), 1, "type string");
throws_ok { $variant->setScalar("", OPCUA::Open62541::TYPES_COUNT) }
(qr/Unsigned value .* not below UA_TYPES_COUNT /, "set below COUNT");
no_leaks_ok { eval {
$variant->setScalar("", OPCUA::Open62541::TYPES_COUNT)
} } "set below COUNT leak";
throws_ok { $variant->setScalar("", -1) }
(qr/Unsigned value .* not below UA_TYPES_COUNT /, "set type -1");
no_leaks_ok { eval { $variant->setScalar("", -1) } } "set type -1 leak";
$variant->setScalar(1, TYPES_SBYTE);
ok($variant->hasScalarType(TYPES_SBYTE), "has type");
no_leaks_ok { $variant->hasScalarType(TYPES_SBYTE) } "has type leak";
ok(!$variant->hasScalarType(TYPES_BYTE), "has type false");
throws_ok { $variant->hasScalarType(OPCUA::Open62541::TYPES_COUNT) }
(qr/Unsigned value .* not below UA_TYPES_COUNT /, "has type below COUNT");
no_leaks_ok { eval {
$variant->hasScalarType(OPCUA::Open62541::TYPES_COUNT)
} } "has type below COUNT leak";
throws_ok { $variant->hasScalarType(-1) }
(qr/Unsigned value .* not below UA_TYPES_COUNT /, "has type -1");
no_leaks_ok { eval { $variant->hasScalarType(-1) } } "has type -1";
$variant->setScalar(OPCUA::Open62541::TRUE, TYPES_BOOLEAN);
is($variant->getScalar(), 1, "scalar TYPES_BOOLEAN TRUE");
$variant->setScalar(1, TYPES_BOOLEAN);
is($variant->getScalar(), 1, "scalar TYPES_BOOLEAN 1");
$variant->setScalar(2, TYPES_BOOLEAN);
is($variant->getScalar(), 1, "scalar TYPES_BOOLEAN 2");
$variant->setScalar('1', TYPES_BOOLEAN);
is($variant->getScalar(), 1, "scalar TYPES_BOOLEAN '1'");
$variant->setScalar('foo', TYPES_BOOLEAN);
is($variant->getScalar(), 1, "scalar TYPES_BOOLEAN 'foo'");
$variant->setScalar(OPCUA::Open62541::FALSE, TYPES_BOOLEAN);
is($variant->getScalar(), '', "scalar TYPES_BOOLEAN FALSE");
$variant->setScalar(undef, TYPES_BOOLEAN);
is($variant->getScalar(), '', "scalar TYPES_BOOLEAN undef");
$variant->setScalar(0, TYPES_BOOLEAN);
is($variant->getScalar(), '', "scalar TYPES_BOOLEAN 0");
$variant->setScalar('0', TYPES_BOOLEAN);
is($variant->getScalar(), '', "scalar TYPES_BOOLEAN '0'");
$variant->setScalar('', TYPES_BOOLEAN);
is($variant->getScalar(), '', "scalar TYPES_BOOLEAN ''");
ok($variant->hasScalarType(TYPES_BOOLEAN), "variant TYPES_BOOLEAN");
is($variant->getType(), TYPES_BOOLEAN, "type TYPES_BOOLEAN");
$variant->setScalar(0, TYPES_SBYTE);
is($variant->getScalar(), 0, "scalar TYPES_SBYTE 0");
$variant->setScalar(-128, TYPES_SBYTE);
is($variant->getScalar(), -128, "scalar TYPES_SBYTE -128");
$variant->setScalar(127, TYPES_SBYTE);
is($variant->getScalar(), 127, "scalar TYPES_SBYTE 127");
throws_ok { $variant->setScalar(-129, TYPES_SBYTE) }
(qr/Integer value -129 less than UA_SBYTE_MIN /, "sbyte min");
no_leaks_ok { eval { $variant->setScalar(-129, TYPES_SBYTE) } }
"sbyte min leak";
throws_ok { $variant->setScalar(128, TYPES_SBYTE) }
(qr/Integer value 128 greater than UA_SBYTE_MAX /, "sbyte max");
no_leaks_ok { eval { $variant->setScalar(128, TYPES_SBYTE) } }
"sbyte max leak";
ok($variant->hasScalarType(TYPES_SBYTE), "variant TYPES_SBYTE");
is($variant->getType(), TYPES_SBYTE, "type TYPES_SBYTE");
$variant->setScalar(0, TYPES_BYTE);
is($variant->getScalar(), 0, "scalar TYPES_BYTE 0");
$variant->setScalar(255, TYPES_BYTE);
is($variant->getScalar(), 255, "scalar TYPES_BYTE 255");
throws_ok { $variant->setScalar(256, TYPES_BYTE) }
(qr/Unsigned value 256 greater than UA_BYTE_MAX /, "byte max");
no_leaks_ok { eval { $variant->setScalar(256, TYPES_BYTE) } }
"byte max leak";
ok($variant->hasScalarType(TYPES_BYTE), "variant TYPES_BYTE");
is($variant->getType(), TYPES_BYTE, "type TYPES_BYTE");
$variant->setScalar(0, TYPES_INT16);
is($variant->getScalar(), 0, "scalar TYPES_INT16 0");
$variant->setScalar(-0x8000, TYPES_INT16);
is($variant->getScalar(), -0x8000, "scalar TYPES_INT16 -0x8000");
$variant->setScalar(0x7fff, TYPES_INT16);
is($variant->getScalar(), 0x7fff, "scalar TYPES_INT16 0x7fff");
throws_ok { $variant->setScalar(-0x8001, TYPES_INT16) }
(qr/Integer value -32769 less than UA_INT16_MIN /, "int16 min");
no_leaks_ok { eval { $variant->setScalar(-0x8001, TYPES_INT16) } }
"int16 min leak";
throws_ok { $variant->setScalar(0x8000, TYPES_INT16) }
(qr/Integer value 32768 greater than UA_INT16_MAX /, "int16 max");
no_leaks_ok { eval { $variant->setScalar(0x8000, TYPES_INT16) } }
"int16 max leak";
ok($variant->hasScalarType(TYPES_INT16), "variant TYPES_INT16");
is($variant->getType(), TYPES_INT16, "type TYPES_INT16");
$variant->setScalar(0, TYPES_UINT16);
is($variant->getScalar(), 0, "scalar TYPES_UINT16 0");
$variant->setScalar(0xffff, TYPES_UINT16);
is($variant->getScalar(), 0xffff, "scalar TYPES_UINT16 0xffff");
throws_ok { $variant->setScalar(0x10000, TYPES_UINT16) }
(qr/Unsigned value 65536 greater than UA_UINT16_MAX /, "uint16 max");
no_leaks_ok { eval { $variant->setScalar(0x10000, TYPES_UINT16) } }
"uint16 max leak";
ok($variant->hasScalarType(TYPES_UINT16), "variant TYPES_UINT16");
is($variant->getType(), TYPES_UINT16, "type TYPES_UINT16");
$variant->setScalar(0, TYPES_INT32);
is($variant->getScalar(), 0, "scalar TYPES_INT32 0");
$variant->setScalar(-0x80000000, TYPES_INT32);
is($variant->getScalar(), -0x80000000, "scalar TYPES_INT32 -0x80000000");
$variant->setScalar(0x7fffffff, TYPES_INT32);
is($variant->getScalar(), 0x7fffffff, "scalar TYPES_INT32 0x7fffffff");
throws_ok { $variant->setScalar(-0x80000001, TYPES_INT32) }
(qr/Integer value -2147483649 less than UA_INT32_MIN /, "int32 min");
no_leaks_ok { eval { $variant->setScalar(-0x80000001, TYPES_INT32) } }
"int32 min leak";
throws_ok { $variant->setScalar(0x80000000, TYPES_INT32) }
(qr/Integer value 2147483648 greater than UA_INT32_MAX /, "int32 max");
no_leaks_ok { eval { $variant->setScalar(0x80000000, TYPES_INT32) } }
"int32 max leak";
ok($variant->hasScalarType(TYPES_INT32), "variant TYPES_INT32");
is($variant->getType(), TYPES_INT32, "type TYPES_INT32");
$variant->setScalar(0, TYPES_UINT32);
is($variant->getScalar(), 0, "scalar TYPES_UINT32 0");
$variant->setScalar(0xffffffff, TYPES_UINT32);
is($variant->getScalar(), 0xffffffff, "scalar TYPES_UINT32 0xffffffff");
# XXX this only works for Perl on 64 bit platforms
throws_ok { $variant->setScalar(1<<32, TYPES_UINT32) }
(qr/Unsigned value 4294967296 greater than UA_UINT32_MAX /, "uint32 max");
no_leaks_ok { eval { $variant->setScalar(1<<32, TYPES_UINT32) } }
"uint32 max leak";
ok($variant->hasScalarType(TYPES_UINT32), "variant TYPES_UINT32");
is($variant->getType(), TYPES_UINT32, "type TYPES_UINT32");
# XXX this only works for Perl on 64 bit platforms
$variant->setScalar(0, TYPES_INT64);
is($variant->getScalar(), 0, "scalar TYPES_INT64 0");
$variant->setScalar(-(1<<63), TYPES_INT64);
is($variant->getScalar(), -(1<<63), "scalar TYPES_INT64 -(1<<63)");
$variant->setScalar((1<<63)-1, TYPES_INT64);
is($variant->getScalar(), (1<<63)-1, "scalar TYPES_INT64 (1<<63)-1");
# no overflow possible
ok($variant->hasScalarType(TYPES_INT64), "variant TYPES_INT64");
is($variant->getType(), TYPES_INT64, "type TYPES_INT64");
$variant->setScalar(0, TYPES_UINT64);
is($variant->getScalar(), 0, "scalar TYPES_UINT64 0");
$variant->setScalar(18446744073709551615, TYPES_UINT64);
is($variant->getScalar(), 18446744073709551615,
"scalar TYPES_UINT64 18446744073709551615");
# no overflow possible
ok($variant->hasScalarType(TYPES_UINT64), "variant TYPES_UINT64");
is($variant->getType(), TYPES_UINT64, "type TYPES_UINT64");
$variant->setScalar(0, TYPES_FLOAT);
is($variant->getScalar(), 0, "scalar TYPES_FLOAT 0");
$variant->setScalar(1.17549435082229E-38, TYPES_FLOAT);
is($variant->getScalar(), 1.17549435082229E-38, "scalar TYPES_FLOAT MIN");
$variant->setScalar(3.4028230607371E+38, TYPES_FLOAT);
is($variant->getScalar(), 3.4028230607371E+38, "scalar TYPES_FLOAT MAX");
throws_ok { $variant->setScalar(-3.40282347E+38, TYPES_FLOAT) }
(qr/Float value -3.402823e\+38 less than -3.402823e\+38 /,
"TYPES_FLOAT min");
no_leaks_ok { eval { $variant->setScalar(-3.40282347E+38, TYPES_FLOAT) } }
"TYPES_FLOAT min leak";
throws_ok { $variant->setScalar(3.40282347E+38, TYPES_FLOAT) }
(qr/Float value 3.402823e\+38 greater than 3.402823e\+38 /,
"TYPES_FLOAT max");
no_leaks_ok { eval { $variant->setScalar(3.40282347E+38, TYPES_FLOAT) } }
"TYPES_FLOAT max leak";
ok($variant->hasScalarType(TYPES_FLOAT), "variant TYPES_FLOAT");
is($variant->getType(), TYPES_FLOAT, "type TYPES_FLOAT");
$variant->setScalar(0, TYPES_DOUBLE);
is($variant->getScalar(), 0, "scalar TYPES_DOUBLE 0");
$variant->setScalar(2.2250738585072014E-308, TYPES_DOUBLE);
is($variant->getScalar(), 2.2250738585072014E-308, "scalar TYPES_DOUBLE MIN");
$variant->setScalar(1.7976931348623157E+308, TYPES_DOUBLE);
is($variant->getScalar(), 1.7976931348623157E+308, "scalar TYPES_DOUBLE MAX");
# no overflow possible
ok($variant->hasScalarType(TYPES_DOUBLE), "variant TYPES_DOUBLE");
is($variant->getType(), TYPES_DOUBLE, "type TYPES_DOUBLE");
my $g;
{
my $s = "foo";
no_leaks_ok {
my $v = OPCUA::Open62541::Variant->new();
$v->setScalar($s, TYPES_STRING);
$g = $v->getScalar();
} "string leak variant";
}
no_leaks_ok {
my $s = "foo";
{
my $v = OPCUA::Open62541::Variant->new();
$v->setScalar($s, TYPES_STRING);
$g = $v->getScalar();
}
} "leak string variant";
is($g, "foo", "string variant get");
{
my $v = OPCUA::Open62541::Variant->new();
no_leaks_ok {
my $s = "foo";
$v->setScalar($s, TYPES_STRING);
} "variant leak string";
$g = $v->getScalar();
}
no_leaks_ok {
my $v = OPCUA::Open62541::Variant->new();
{
my $s = "foo";
$v->setScalar($s, TYPES_STRING);
}
$g = $v->getScalar();
} "leak variant string";
is($g, "foo", "variant string get");
| 41.677903 | 78 | 0.706506 |
ed6a01be824a0f2a71f7099cf5268d968b0faff2
| 422 |
t
|
Perl
|
t/01_array/items_after.t
|
gitpan/List-Objects-WithUtils
|
2fd54ce519d686187c7239c29fd23bcad94e07f7
|
[
"Artistic-1.0"
] | null | null | null |
t/01_array/items_after.t
|
gitpan/List-Objects-WithUtils
|
2fd54ce519d686187c7239c29fd23bcad94e07f7
|
[
"Artistic-1.0"
] | null | null | null |
t/01_array/items_after.t
|
gitpan/List-Objects-WithUtils
|
2fd54ce519d686187c7239c29fd23bcad94e07f7
|
[
"Artistic-1.0"
] | null | null | null |
use Test::More;
use strict; use warnings FATAL => 'all';
use List::Objects::WithUtils 'array';
my $arr = array( 1 .. 7 );
my $after = $arr->items_after(sub { $_ == 3 });
is_deeply
[ $after->all ],
[ 4 .. 7 ],
'items_after ok';
ok $arr->items_after(sub { $_ > 10 })->is_empty,
'items_after empty resultset ok';
ok array->items_after(sub { $_ == 1 })->is_empty,
'items_after on empty array ok';
done_testing;
| 21.1 | 49 | 0.620853 |
ed069bbe1f96cd9f3d3c9f20ab3fee4a7fb4865c
| 113 |
pl
|
Perl
|
ejem1-1.pl
|
DiegoReiriz/PrologExercises
|
d9344656e238db19bb8e3b0c71c8f12abe533214
|
[
"MIT"
] | null | null | null |
ejem1-1.pl
|
DiegoReiriz/PrologExercises
|
d9344656e238db19bb8e3b0c71c8f12abe533214
|
[
"MIT"
] | null | null | null |
ejem1-1.pl
|
DiegoReiriz/PrologExercises
|
d9344656e238db19bb8e3b0c71c8f12abe533214
|
[
"MIT"
] | null | null | null |
chica(rosa).
chica(laura).
chica(ana).
chico(pedro).
chico(juan).
chico(pablo).
pareja(X,Y):- chico(X),chica(Y).
| 14.125 | 32 | 0.672566 |
ed7cc5676b39fa7c108c4fa3b53d47f117534c11
| 31,319 |
pm
|
Perl
|
lib/KBaseReport/KBaseReportClient.pm
|
mclark58/kb_fungalmodeling
|
53f549fb81b67c6e5b742409cdb57c9adcc98ec7
|
[
"MIT"
] | 1 |
2018-07-15T15:37:42.000Z
|
2018-07-15T15:37:42.000Z
|
lib/KBaseReport/KBaseReportClient.pm
|
mclark58/kb_fungalmodeling
|
53f549fb81b67c6e5b742409cdb57c9adcc98ec7
|
[
"MIT"
] | 7 |
2016-10-26T21:26:22.000Z
|
2019-09-11T21:19:28.000Z
|
lib/KBaseReport/KBaseReportClient.pm
|
mclark58/kb_fungalmodeling
|
53f549fb81b67c6e5b742409cdb57c9adcc98ec7
|
[
"MIT"
] | 6 |
2016-10-11T15:51:59.000Z
|
2019-09-10T19:15:17.000Z
|
package KBaseReport::KBaseReportClient;
use JSON::RPC::Client;
use POSIX;
use strict;
use Data::Dumper;
use URI;
use Bio::KBase::Exceptions;
use Time::HiRes;
my $get_time = sub { time, 0 };
eval {
require Time::HiRes;
$get_time = sub { Time::HiRes::gettimeofday() };
};
use Bio::KBase::AuthToken;
# Client version should match Impl version
# This is a Semantic Version number,
# http://semver.org
our $VERSION = "0.1.0";
=head1 NAME
KBaseReport::KBaseReportClient
=head1 DESCRIPTION
Module for a simple WS data object report type.
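A minimal construction sketch (the endpoint URL below is a placeholder, not
a known deployment; pass the real service URL and an auth token in practice):

    my $report_client = KBaseReport::KBaseReportClient->new(
        'https://example.kbase.us/services/kb_report',
        token => $token,
    );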
=cut
sub new
{
my($class, $url, @args) = @_;
my $self = {
client => KBaseReport::KBaseReportClient::RpcClient->new,
url => $url,
headers => [],
};
my %arg_hash = @args;
$self->{async_job_check_time} = 0.1;
if (exists $arg_hash{"async_job_check_time_ms"}) {
$self->{async_job_check_time} = $arg_hash{"async_job_check_time_ms"} / 1000.0;
}
$self->{async_job_check_time_scale_percent} = 150;
if (exists $arg_hash{"async_job_check_time_scale_percent"}) {
$self->{async_job_check_time_scale_percent} = $arg_hash{"async_job_check_time_scale_percent"};
}
$self->{async_job_check_max_time} = 300; # 5 minutes
if (exists $arg_hash{"async_job_check_max_time_ms"}) {
$self->{async_job_check_max_time} = $arg_hash{"async_job_check_max_time_ms"} / 1000.0;
}
my $service_version = 'release';
if (exists $arg_hash{"service_version"}) {
$service_version = $arg_hash{"service_version"};
}
$self->{service_version} = $service_version;
chomp($self->{hostname} = `hostname`);
$self->{hostname} ||= 'unknown-host';
#
# Set up for propagating KBRPC_TAG and KBRPC_METADATA environment variables through
# to invoked services. If these values are not set, we create a new tag
# and a metadata field with basic information about the invoking script.
#
if ($ENV{KBRPC_TAG})
{
$self->{kbrpc_tag} = $ENV{KBRPC_TAG};
}
else
{
my ($t, $us) = &$get_time();
$us = sprintf("%06d", $us);
my $ts = strftime("%Y-%m-%dT%H:%M:%S.${us}Z", gmtime $t);
$self->{kbrpc_tag} = "C:$0:$self->{hostname}:$$:$ts";
}
push(@{$self->{headers}}, 'Kbrpc-Tag', $self->{kbrpc_tag});
if ($ENV{KBRPC_METADATA})
{
$self->{kbrpc_metadata} = $ENV{KBRPC_METADATA};
push(@{$self->{headers}}, 'Kbrpc-Metadata', $self->{kbrpc_metadata});
}
if ($ENV{KBRPC_ERROR_DEST})
{
$self->{kbrpc_error_dest} = $ENV{KBRPC_ERROR_DEST};
push(@{$self->{headers}}, 'Kbrpc-Errordest', $self->{kbrpc_error_dest});
}
#
# This module requires authentication.
#
# We create an auth token, passing through the arguments that we were (hopefully) given.
{
my %arg_hash2 = @args;
if (exists $arg_hash2{"token"}) {
$self->{token} = $arg_hash2{"token"};
} elsif (exists $arg_hash2{"user_id"}) {
my $token = Bio::KBase::AuthToken->new(@args);
if (!$token->error_message) {
$self->{token} = $token->token;
}
}
if (exists $self->{token})
{
$self->{client}->{token} = $self->{token};
}
}
my $ua = $self->{client}->ua;
my $timeout = $ENV{CDMI_TIMEOUT} || (30 * 60);
$ua->timeout($timeout);
bless $self, $class;
# $self->_validate_version();
return $self;
}
sub _check_job {
my($self, @args) = @_;
# Authentication: ${method.authentication}
if ((my $n = @args) != 1) {
Bio::KBase::Exceptions::ArgumentValidationError->throw(error =>
"Invalid argument count for function _check_job (received $n, expecting 1)");
}
{
my($job_id) = @args;
my @_bad_arguments;
(!ref($job_id)) or push(@_bad_arguments, "Invalid type for argument 0 \"job_id\" (it should be a string)");
if (@_bad_arguments) {
my $msg = "Invalid arguments passed to _check_job:\n" . join("", map { "\t$_\n" } @_bad_arguments);
Bio::KBase::Exceptions::ArgumentValidationError->throw(error => $msg,
method_name => '_check_job');
}
}
my $result = $self->{client}->call($self->{url}, $self->{headers}, {
method => "KBaseReport._check_job",
params => \@args});
if ($result) {
if ($result->is_error) {
Bio::KBase::Exceptions::JSONRPC->throw(error => $result->error_message,
code => $result->content->{error}->{code},
method_name => '_check_job',
                           data => $result->content->{error}->{error} # JSON::RPC::ReturnObject only supports JSONRPC 1.1 or 1.0
);
} else {
return $result->result->[0];
}
} else {
Bio::KBase::Exceptions::HTTP->throw(error => "Error invoking method _check_job",
status_line => $self->{client}->status_line,
method_name => '_check_job');
}
}
=head2 create
$info = $obj->create($params)
=over 4
=item Parameter and return types
=begin html
<pre>
$params is a KBaseReport.CreateParams
$info is a KBaseReport.ReportInfo
CreateParams is a reference to a hash where the following keys are defined:
report has a value which is a KBaseReport.Report
workspace_name has a value which is a string
Report is a reference to a hash where the following keys are defined:
text_message has a value which is a string
warnings has a value which is a reference to a list where each element is a string
objects_created has a value which is a reference to a list where each element is a KBaseReport.WorkspaceObject
file_links has a value which is a reference to a list where each element is a KBaseReport.LinkedFile
html_links has a value which is a reference to a list where each element is a KBaseReport.LinkedFile
direct_html has a value which is a string
direct_html_link_index has a value which is an int
WorkspaceObject is a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
description has a value which is a string
ws_id is a string
LinkedFile is a reference to a hash where the following keys are defined:
handle has a value which is a KBaseReport.handle_ref
description has a value which is a string
name has a value which is a string
label has a value which is a string
URL has a value which is a string
handle_ref is a string
ReportInfo is a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
name has a value which is a string
</pre>
=end html
=begin text
$params is a KBaseReport.CreateParams
$info is a KBaseReport.ReportInfo
CreateParams is a reference to a hash where the following keys are defined:
report has a value which is a KBaseReport.Report
workspace_name has a value which is a string
Report is a reference to a hash where the following keys are defined:
text_message has a value which is a string
warnings has a value which is a reference to a list where each element is a string
objects_created has a value which is a reference to a list where each element is a KBaseReport.WorkspaceObject
file_links has a value which is a reference to a list where each element is a KBaseReport.LinkedFile
html_links has a value which is a reference to a list where each element is a KBaseReport.LinkedFile
direct_html has a value which is a string
direct_html_link_index has a value which is an int
WorkspaceObject is a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
description has a value which is a string
ws_id is a string
LinkedFile is a reference to a hash where the following keys are defined:
handle has a value which is a KBaseReport.handle_ref
description has a value which is a string
name has a value which is a string
label has a value which is a string
URL has a value which is a string
handle_ref is a string
ReportInfo is a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
name has a value which is a string
=end text
=item Description
Create a KBaseReport with a brief summary of an App run.
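A hedged call sketch, using the CreateParams structure documented under
TYPES below (the workspace name and message are placeholders):

    my $info = $report_client->create({
        report => {
            text_message    => 'App finished successfully.',
            objects_created => [],
        },
        workspace_name => 'my_workspace',
    });
    # $info->{ref} and $info->{name} identify the saved report object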
=back
=cut
sub create
{
my($self, @args) = @_;
my $job_id = $self->_create_submit(@args);
my $async_job_check_time = $self->{async_job_check_time};
while (1) {
Time::HiRes::sleep($async_job_check_time);
$async_job_check_time *= $self->{async_job_check_time_scale_percent} / 100.0;
if ($async_job_check_time > $self->{async_job_check_max_time}) {
$async_job_check_time = $self->{async_job_check_max_time};
}
my $job_state_ref = $self->_check_job($job_id);
if ($job_state_ref->{"finished"} != 0) {
if (!exists $job_state_ref->{"result"}) {
$job_state_ref->{"result"} = [];
}
return wantarray ? @{$job_state_ref->{"result"}} : $job_state_ref->{"result"}->[0];
}
}
}
sub _create_submit {
my($self, @args) = @_;
# Authentication: required
if ((my $n = @args) != 1) {
Bio::KBase::Exceptions::ArgumentValidationError->throw(error =>
"Invalid argument count for function _create_submit (received $n, expecting 1)");
}
{
my($params) = @args;
my @_bad_arguments;
(ref($params) eq 'HASH') or push(@_bad_arguments, "Invalid type for argument 1 \"params\" (value was \"$params\")");
if (@_bad_arguments) {
my $msg = "Invalid arguments passed to _create_submit:\n" . join("", map { "\t$_\n" } @_bad_arguments);
Bio::KBase::Exceptions::ArgumentValidationError->throw(error => $msg,
method_name => '_create_submit');
}
}
my $context = undef;
if ($self->{service_version}) {
$context = {'service_ver' => $self->{service_version}};
}
my $result = $self->{client}->call($self->{url}, $self->{headers}, {
method => "KBaseReport._create_submit",
params => \@args, context => $context});
if ($result) {
if ($result->is_error) {
Bio::KBase::Exceptions::JSONRPC->throw(error => $result->error_message,
code => $result->content->{error}->{code},
method_name => '_create_submit',
                           data => $result->content->{error}->{error} # JSON::RPC::ReturnObject only supports JSONRPC 1.1 or 1.0
);
} else {
return $result->result->[0]; # job_id
}
} else {
Bio::KBase::Exceptions::HTTP->throw(error => "Error invoking method _create_submit",
status_line => $self->{client}->status_line,
method_name => '_create_submit');
}
}
=head2 create_extended_report
$info = $obj->create_extended_report($params)
=over 4
=item Parameter and return types
=begin html
<pre>
$params is a KBaseReport.CreateExtendedReportParams
$info is a KBaseReport.ReportInfo
CreateExtendedReportParams is a reference to a hash where the following keys are defined:
message has a value which is a string
objects_created has a value which is a reference to a list where each element is a KBaseReport.WorkspaceObject
warnings has a value which is a reference to a list where each element is a string
html_links has a value which is a reference to a list where each element is a KBaseReport.File
direct_html has a value which is a string
direct_html_link_index has a value which is an int
file_links has a value which is a reference to a list where each element is a KBaseReport.File
report_object_name has a value which is a string
html_window_height has a value which is a float
summary_window_height has a value which is a float
workspace_name has a value which is a string
WorkspaceObject is a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
description has a value which is a string
ws_id is a string
File is a reference to a hash where the following keys are defined:
path has a value which is a string
shock_id has a value which is a string
name has a value which is a string
description has a value which is a string
ReportInfo is a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
name has a value which is a string
</pre>
=end html
=begin text
$params is a KBaseReport.CreateExtendedReportParams
$info is a KBaseReport.ReportInfo
CreateExtendedReportParams is a reference to a hash where the following keys are defined:
message has a value which is a string
objects_created has a value which is a reference to a list where each element is a KBaseReport.WorkspaceObject
warnings has a value which is a reference to a list where each element is a string
html_links has a value which is a reference to a list where each element is a KBaseReport.File
direct_html has a value which is a string
direct_html_link_index has a value which is an int
file_links has a value which is a reference to a list where each element is a KBaseReport.File
report_object_name has a value which is a string
html_window_height has a value which is a float
summary_window_height has a value which is a float
workspace_name has a value which is a string
WorkspaceObject is a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
description has a value which is a string
ws_id is a string
File is a reference to a hash where the following keys are defined:
path has a value which is a string
shock_id has a value which is a string
name has a value which is a string
description has a value which is a string
ReportInfo is a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
name has a value which is a string
=end text
=item Description
A more complex function to create a report. It enables the user to specify files and an HTML view that the report should link to.
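A hedged call sketch (all values are placeholders; see
CreateExtendedReportParams under TYPES for the remaining optional fields):

    my $info = $report_client->create_extended_report({
        message            => 'Run complete.',
        report_object_name => 'my_report',
        workspace_name     => 'my_workspace',
    });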
=back
=cut
sub create_extended_report
{
my($self, @args) = @_;
my $job_id = $self->_create_extended_report_submit(@args);
my $async_job_check_time = $self->{async_job_check_time};
while (1) {
Time::HiRes::sleep($async_job_check_time);
$async_job_check_time *= $self->{async_job_check_time_scale_percent} / 100.0;
if ($async_job_check_time > $self->{async_job_check_max_time}) {
$async_job_check_time = $self->{async_job_check_max_time};
}
my $job_state_ref = $self->_check_job($job_id);
if ($job_state_ref->{"finished"} != 0) {
if (!exists $job_state_ref->{"result"}) {
$job_state_ref->{"result"} = [];
}
return wantarray ? @{$job_state_ref->{"result"}} : $job_state_ref->{"result"}->[0];
}
}
}
sub _create_extended_report_submit {
my($self, @args) = @_;
# Authentication: required
if ((my $n = @args) != 1) {
Bio::KBase::Exceptions::ArgumentValidationError->throw(error =>
"Invalid argument count for function _create_extended_report_submit (received $n, expecting 1)");
}
{
my($params) = @args;
my @_bad_arguments;
(ref($params) eq 'HASH') or push(@_bad_arguments, "Invalid type for argument 1 \"params\" (value was \"$params\")");
if (@_bad_arguments) {
my $msg = "Invalid arguments passed to _create_extended_report_submit:\n" . join("", map { "\t$_\n" } @_bad_arguments);
Bio::KBase::Exceptions::ArgumentValidationError->throw(error => $msg,
method_name => '_create_extended_report_submit');
}
}
my $context = undef;
if ($self->{service_version}) {
$context = {'service_ver' => $self->{service_version}};
}
my $result = $self->{client}->call($self->{url}, $self->{headers}, {
method => "KBaseReport._create_extended_report_submit",
params => \@args, context => $context});
if ($result) {
if ($result->is_error) {
Bio::KBase::Exceptions::JSONRPC->throw(error => $result->error_message,
code => $result->content->{error}->{code},
method_name => '_create_extended_report_submit',
                           data => $result->content->{error}->{error} # JSON::RPC::ReturnObject only supports JSONRPC 1.1 or 1.0
);
} else {
return $result->result->[0]; # job_id
}
} else {
Bio::KBase::Exceptions::HTTP->throw(error => "Error invoking method _create_extended_report_submit",
status_line => $self->{client}->status_line,
method_name => '_create_extended_report_submit');
}
}
sub status
{
my($self, @args) = @_;
my $job_id = undef;
if ((my $n = @args) != 0) {
Bio::KBase::Exceptions::ArgumentValidationError->throw(error =>
"Invalid argument count for function status (received $n, expecting 0)");
}
my $context = undef;
if ($self->{service_version}) {
$context = {'service_ver' => $self->{service_version}};
}
my $result = $self->{client}->call($self->{url}, $self->{headers}, {
method => "KBaseReport._status_submit",
params => \@args, context => $context});
if ($result) {
if ($result->is_error) {
Bio::KBase::Exceptions::JSONRPC->throw(error => $result->error_message,
code => $result->content->{error}->{code},
method_name => '_status_submit',
                           data => $result->content->{error}->{error} # JSON::RPC::ReturnObject only supports JSONRPC 1.1 or 1.0
);
} else {
$job_id = $result->result->[0];
}
} else {
Bio::KBase::Exceptions::HTTP->throw(error => "Error invoking method _status_submit",
status_line => $self->{client}->status_line,
method_name => '_status_submit');
}
my $async_job_check_time = $self->{async_job_check_time};
while (1) {
Time::HiRes::sleep($async_job_check_time);
$async_job_check_time *= $self->{async_job_check_time_scale_percent} / 100.0;
if ($async_job_check_time > $self->{async_job_check_max_time}) {
$async_job_check_time = $self->{async_job_check_max_time};
}
my $job_state_ref = $self->_check_job($job_id);
if ($job_state_ref->{"finished"} != 0) {
if (!exists $job_state_ref->{"result"}) {
$job_state_ref->{"result"} = [];
}
return wantarray ? @{$job_state_ref->{"result"}} : $job_state_ref->{"result"}->[0];
}
}
}
sub version {
my ($self) = @_;
my $result = $self->{client}->call($self->{url}, $self->{headers}, {
method => "KBaseReport.version",
params => [],
});
if ($result) {
if ($result->is_error) {
Bio::KBase::Exceptions::JSONRPC->throw(
error => $result->error_message,
code => $result->content->{code},
method_name => 'create_extended_report',
);
} else {
return wantarray ? @{$result->result} : $result->result->[0];
}
} else {
Bio::KBase::Exceptions::HTTP->throw(
error => "Error invoking method create_extended_report",
status_line => $self->{client}->status_line,
method_name => 'create_extended_report',
);
}
}
sub _validate_version {
my ($self) = @_;
my $svr_version = $self->version();
my $client_version = $VERSION;
my ($cMajor, $cMinor) = split(/\./, $client_version);
my ($sMajor, $sMinor) = split(/\./, $svr_version);
if ($sMajor != $cMajor) {
Bio::KBase::Exceptions::ClientServerIncompatible->throw(
error => "Major version numbers differ.",
server_version => $svr_version,
client_version => $client_version
);
}
if ($sMinor < $cMinor) {
Bio::KBase::Exceptions::ClientServerIncompatible->throw(
error => "Client minor version greater than Server minor version.",
server_version => $svr_version,
client_version => $client_version
);
}
if ($sMinor > $cMinor) {
warn "New client version available for KBaseReport::KBaseReportClient\n";
}
if ($sMajor == 0) {
warn "KBaseReport::KBaseReportClient version is $svr_version. API subject to change.\n";
}
}
=head1 TYPES
=head2 ws_id
=over 4
=item Description
@id ws
=item Definition
=begin html
<pre>
a string
</pre>
=end html
=begin text
a string
=end text
=back
=head2 handle_ref
=over 4
=item Description
Reference to a handle
@id handle
=item Definition
=begin html
<pre>
a string
</pre>
=end html
=begin text
a string
=end text
=back
=head2 WorkspaceObject
=over 4
=item Description
Represents a Workspace object with some brief description text
that can be associated with the object.
@optional description
=item Definition
=begin html
<pre>
a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
description has a value which is a string
</pre>
=end html
=begin text
a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
description has a value which is a string
=end text
=back
=head2 LinkedFile
=over 4
=item Description
Represents a file or HTML archive that the report should link to
@optional description label
=item Definition
=begin html
<pre>
a reference to a hash where the following keys are defined:
handle has a value which is a KBaseReport.handle_ref
description has a value which is a string
name has a value which is a string
label has a value which is a string
URL has a value which is a string
</pre>
=end html
=begin text
a reference to a hash where the following keys are defined:
handle has a value which is a KBaseReport.handle_ref
description has a value which is a string
name has a value which is a string
label has a value which is a string
URL has a value which is a string
=end text
=back
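
For example (illustrative values only; in practice the handle string
comes from the handle service, which is not shown here):

    my $link = {
        handle      => 'KBH_12345',
        name        => 'report.zip',
        label       => 'Full report archive',
        description => 'zipped HTML report',
    };
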
=head2 Report
=over 4
=item Description
A simple Report of a method run in KBase.
For now it only provides a way to display a fixed-width text summary message, a
list of warnings, and a list of objects created (each with a description).
@optional warnings file_links html_links direct_html direct_html_link_index
@metadata ws length(warnings) as Warnings
@metadata ws length(text_message) as Size(characters)
@metadata ws length(objects_created) as Objects Created
=item Definition
=begin html
<pre>
a reference to a hash where the following keys are defined:
text_message has a value which is a string
warnings has a value which is a reference to a list where each element is a string
objects_created has a value which is a reference to a list where each element is a KBaseReport.WorkspaceObject
file_links has a value which is a reference to a list where each element is a KBaseReport.LinkedFile
html_links has a value which is a reference to a list where each element is a KBaseReport.LinkedFile
direct_html has a value which is a string
direct_html_link_index has a value which is an int
</pre>
=end html
=begin text
a reference to a hash where the following keys are defined:
text_message has a value which is a string
warnings has a value which is a reference to a list where each element is a string
objects_created has a value which is a reference to a list where each element is a KBaseReport.WorkspaceObject
file_links has a value which is a reference to a list where each element is a KBaseReport.LinkedFile
html_links has a value which is a reference to a list where each element is a KBaseReport.LinkedFile
direct_html has a value which is a string
direct_html_link_index has a value which is an int
=end text
=back
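
A minimal sketch of building this structure in Perl (all field values
are illustrative, not taken from a real run):

    my $report = {
        text_message    => 'Analysis finished with 1 warning.',
        warnings        => ['low coverage on contig 3'],
        objects_created => [
            { ref => 'myws/assembly.1', description => 'assembled contigs' },
        ],
    };
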
=head2 CreateParams
=over 4
=item Description
Provide the report information. The structure is:
params = {
report: {
text_message: '',
warnings: ['w1'],
objects_created: [ {
ref: 'ws/objid',
description: ''
}]
},
workspace_name: 'ws'
}
=item Definition
=begin html
<pre>
a reference to a hash where the following keys are defined:
report has a value which is a KBaseReport.Report
workspace_name has a value which is a string
</pre>
=end html
=begin text
a reference to a hash where the following keys are defined:
report has a value which is a KBaseReport.Report
workspace_name has a value which is a string
=end text
=back
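
A hypothetical call using this structure (assuming the client exposes
the create method these parameters describe; all values are
illustrative):

    my $info = $client->create({
        report => {
            text_message => 'Done.',
            warnings     => [],
        },
        workspace_name => 'my_workspace',
    });
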
=head2 ReportInfo
=over 4
=item Description
The reference to the saved KBaseReport. The structure is:
reportInfo = {
ref: 'ws/objid/ver',
name: 'myreport.2262323452'
}
=item Definition
=begin html
<pre>
a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
name has a value which is a string
</pre>
=end html
=begin text
a reference to a hash where the following keys are defined:
ref has a value which is a KBaseReport.ws_id
name has a value which is a string
=end text
=back
=head2 File
=over 4
=item Definition
=begin html
<pre>
a reference to a hash where the following keys are defined:
path has a value which is a string
shock_id has a value which is a string
name has a value which is a string
description has a value which is a string
</pre>
=end html
=begin text
a reference to a hash where the following keys are defined:
path has a value which is a string
shock_id has a value which is a string
name has a value which is a string
description has a value which is a string
=end text
=back
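
Typically either path or shock_id is populated, not both (an assumption
based on the field pair, not stated explicitly here). For example:

    my $file = {
        path        => '/kb/module/work/tmp/output.tsv',
        name        => 'output.tsv',
        description => 'tab-separated summary of hits',
    };
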
=head2 CreateExtendedReportParams
=over 4
=item Description
Parameters used to create a more complex report with file and html links
The following arguments allow the user to specify the classical data fields in the report object:
string message - simple text message to store in report object
list <WorkspaceObject> objects_created;
list <string> warnings - a list of warning messages in simple text
The following argument allows the user to specify the location of HTML files/directories that the report widget will render or link to:
list <fileRef> html_links - a list of paths or shock node IDs pointing to a single flat HTML file or to the top-level directory of a website
The report widget can render one HTML view directly. Set one of the following fields to decide which view to render:
string direct_html - simple HTML text that will be rendered within the report widget
int direct_html_link_index - use this to specify the index of the page in html_links to view directly in the report widget (ignored if direct_html is set)
The following argument allows the user to specify the location of files that the report widget should link for download:
list <fileRef> file_links - a list of paths or shock node IDs pointing to a single flat file
The following parameters indicate where the report object should be saved in the workspace:
string report_object_name - name to use for the report object (job ID is used if left unspecified)
float html_window_height - height of the HTML window in the narrative output widget
float summary_window_height - height of the summary window in the narrative output widget
string workspace_name - name of workspace where object should be saved
=item Definition
=begin html
<pre>
a reference to a hash where the following keys are defined:
message has a value which is a string
objects_created has a value which is a reference to a list where each element is a KBaseReport.WorkspaceObject
warnings has a value which is a reference to a list where each element is a string
html_links has a value which is a reference to a list where each element is a KBaseReport.File
direct_html has a value which is a string
direct_html_link_index has a value which is an int
file_links has a value which is a reference to a list where each element is a KBaseReport.File
report_object_name has a value which is a string
html_window_height has a value which is a float
summary_window_height has a value which is a float
workspace_name has a value which is a string
</pre>
=end html
=begin text
a reference to a hash where the following keys are defined:
message has a value which is a string
objects_created has a value which is a reference to a list where each element is a KBaseReport.WorkspaceObject
warnings has a value which is a reference to a list where each element is a string
html_links has a value which is a reference to a list where each element is a KBaseReport.File
direct_html has a value which is a string
direct_html_link_index has a value which is an int
file_links has a value which is a reference to a list where each element is a KBaseReport.File
report_object_name has a value which is a string
html_window_height has a value which is a float
summary_window_height has a value which is a float
workspace_name has a value which is a string
=end text
=back
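
A sketch of a create_extended_report call built from these fields
(values are illustrative only):

    my $report_info = $client->create_extended_report({
        message                => 'Pipeline finished.',
        warnings               => [],
        html_links             => [ { path => '/tmp/report_html', name => 'index.html' } ],
        direct_html_link_index => 0,
        report_object_name     => 'my_report',
        workspace_name         => 'my_workspace',
    });
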
=cut
package KBaseReport::KBaseReportClient::RpcClient;
use base 'JSON::RPC::Client';
use POSIX;
use strict;
#
# Override JSON::RPC::Client::call because it doesn't handle error returns properly.
#
sub call {
my ($self, $uri, $headers, $obj) = @_;
my $result;
{
if ($uri =~ /\?/) {
$result = $self->_get($uri);
}
else {
Carp::croak "not hashref." unless (ref $obj eq 'HASH');
$result = $self->_post($uri, $headers, $obj);
}
}
    # "my $x = EXPR if COND" is a Perl pitfall: when COND is false the
    # variable may retain a stale value instead of being freshly undef.
    my $service = $obj ? ($obj->{method} =~ /^system\./) : 0;
$self->status_line($result->status_line);
if ($result->is_success) {
return unless($result->content); # notification?
if ($service) {
return JSON::RPC::ServiceObject->new($result, $self->json);
}
return JSON::RPC::ReturnObject->new($result, $self->json);
}
elsif ($result->content_type eq 'application/json')
{
return JSON::RPC::ReturnObject->new($result, $self->json);
}
else {
return;
}
}
sub _post {
my ($self, $uri, $headers, $obj) = @_;
my $json = $self->json;
$obj->{version} ||= $self->{version} || '1.1';
if ($obj->{version} eq '1.0') {
delete $obj->{version};
if (exists $obj->{id}) {
$self->id($obj->{id}) if ($obj->{id}); # if undef, it is notification.
}
else {
$obj->{id} = $self->id || ($self->id('JSON::RPC::Client'));
}
}
else {
# $obj->{id} = $self->id if (defined $self->id);
# Assign a random number to the id if one hasn't been set
$obj->{id} = (defined $self->id) ? $self->id : substr(rand(),2);
}
my $content = $json->encode($obj);
$self->ua->post(
$uri,
Content_Type => $self->{content_type},
Content => $content,
Accept => 'application/json',
@$headers,
($self->{token} ? (Authorization => $self->{token}) : ()),
);
}
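#
# For reference, a 1.1-style request body produced by _post looks
# roughly like the following (illustrative, not a verbatim exchange):
#
#   {
#     "version": "1.1",
#     "method":  "KBaseReport.create_extended_report",
#     "params":  [ { "workspace_name": "my_workspace" } ],
#     "id":      "0.123456789"
#   }
#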
1;