Fix ASE bulk_insert for bind changes in 0e77335
When using the bulk API, update the binds to the structure expected by
_execute_array. Also temporarily disable $sth->finish in the bulk API
codepath because for some reason it rolls everything back. Modify the
test to always exercise both codepaths from now on.
ribasushi authored and rkitover committed Feb 14, 2012
1 parent eec07bc commit 6d5679b
Showing 2 changed files with 73 additions and 57 deletions.
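To make the first point of the commit message concrete, here is a minimal standalone sketch of the bind-structure change, using invented column names and sample rows. This is an illustration only, not DBIC internals; in the actual diff below the new structure is produced through $self->_resolve_bindattrs with the source's column info.

    use strict;
    use warnings;
    use Data::Dumper;

    # hypothetical column list and rows, already reordered to database order
    my @source_columns = qw(artistid name rank charfield);
    my @new_data = ( [ 1, 'Artist One', 13, undef ], [ 2, 'Artist Two', 27, undef ] );

    # old style: plain [ column name => positional index ] pairs
    my @old_bind = map { [ $source_columns[$_] => $_ ] } 0 .. $#source_columns;

    # new style: each bind carries a metadata hash; _bind_data_slice_idx tells
    # _execute_array which element of every data row to bind for that column
    my @proto_bind = map {
        [ { dbic_colname => $source_columns[$_], _bind_data_slice_idx => $_ }
            => $new_data[0][$_] ]
    } 0 .. $#source_columns;

    print Dumper( \@old_bind, \@proto_bind );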
86 changes: 41 additions & 45 deletions lib/DBIx/Class/Storage/DBI/Sybase/ASE.pm
@@ -25,7 +25,6 @@ __PACKAGE__->datetime_parser_type(
__PACKAGE__->mk_group_accessors('simple' =>
qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
_bulk_storage _is_bulk_storage _began_bulk_work
_bulk_disabled_due_to_coderef_connect_info_warned
_identity_method/
);

@@ -474,7 +473,6 @@ sub update {
my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;

# We can't only update NULL blobs, because blobs cannot be in the WHERE clause.

$self->next::method($source, \%blobs_to_empty, $where, @rest);

# Now update the blobs before the other columns in case the update of other
@@ -511,22 +509,15 @@ sub insert_bulk {

my $is_identity_insert = (first { $_ eq $identity_col } @{$cols}) ? 1 : 0;

my @source_columns = $source->columns;

my $use_bulk_api =
$self->_bulk_storage &&
$self->_get_dbh->{syb_has_blk};

if ((not $use_bulk_api)
&&
(ref($self->_dbi_connect_info->[0]) eq 'CODE')
&&
(not $self->_bulk_disabled_due_to_coderef_connect_info_warned)) {
carp <<'EOF';
Bulk API support disabled due to use of a CODEREF connect_info. Reverting to
regular array inserts.
EOF
$self->_bulk_disabled_due_to_coderef_connect_info_warned(1);
if (! $use_bulk_api and ref($self->_dbi_connect_info->[0]) eq 'CODE') {
carp_unique( join ' ',
'Bulk API support disabled due to use of a CODEREF connect_info.',
'Reverting to regular array inserts.',
);
}

if (not $use_bulk_api) {
@@ -575,27 +566,34 @@ EOF
# otherwise, use the bulk API

# rearrange @$data so that columns are in database order
my %orig_idx;
@orig_idx{@$cols} = 0..$#$cols;
# and so we submit a full column list
my %orig_order = map { $cols->[$_] => $_ } 0..$#$cols;

my %new_idx;
@new_idx{@source_columns} = 0..$#source_columns;
my @source_columns = $source->columns;

# bcp identity index is 1-based
my $identity_idx = first { $source_columns[$_] eq $identity_col } (0..$#source_columns);
$identity_idx = defined $identity_idx ? $identity_idx + 1 : 0;

my @new_data;
for my $datum (@$data) {
my $new_datum = [];
for my $col (@source_columns) {
# identity data will be 'undef' if not $is_identity_insert
# columns with defaults will also be 'undef'
$new_datum->[ $new_idx{$col} ] =
exists $orig_idx{$col} ? $datum->[ $orig_idx{$col} ] : undef;
}
push @new_data, $new_datum;
for my $slice_idx (0..$#$data) {
push @new_data, [map {
# identity data will be 'undef' if not $is_identity_insert
# columns with defaults will also be 'undef'
exists $orig_order{$_}
? $data->[$slice_idx][$orig_order{$_}]
: undef
} @source_columns];
}

# bcp identity index is 1-based
my $identity_idx = exists $new_idx{$identity_col} ?
$new_idx{$identity_col} + 1 : 0;
my $proto_bind = $self->_resolve_bindattrs(
$source,
[map {
[ { dbic_colname => $source_columns[$_], _bind_data_slice_idx => $_ }
=> $new_data[0][$_] ]
} (0 ..$#source_columns) ],
$columns_info
);

## Set a client-side conversion error handler, straight from DBD::Sybase docs.
# This ignores any data conversion errors detected by the client side libs, as
@@ -621,6 +619,7 @@

my $guard = $bulk->txn_scope_guard;

## FIXME - once this is done - address the FIXME on finish() below
## XXX get this to work instead of our own $sth
## will require SQLA or *Hacks changes for ordered columns
# $bulk->next::method($source, \@source_columns, \@new_data, {
@@ -648,13 +647,19 @@ EOF
}
);

my @bind = map { [ $source_columns[$_] => $_ ] } (0 .. $#source_columns);
{
# FIXME the $sth->finish in _execute_array does a rollback for some
# reason. Disable it temporarily until we fix the SQLMaker thing above
no warnings 'redefine';
no strict 'refs';
local *{ref($sth).'::finish'} = sub {};

$self->_execute_array(
$source, $sth, \@bind, \@source_columns, \@new_data, sub {
$guard->commit
}
);
$self->_execute_array(
$source, $sth, $proto_bind, \@source_columns, \@new_data
);
}

$guard->commit;

$bulk->_query_end($sql);
} catch {
@@ -681,15 +686,6 @@ EOF
}
}

sub _dbh_execute_array {
my ($self, $sth, $tuple_status, $cb) = @_;

my $rv = $self->next::method($sth, $tuple_status);
$cb->() if $cb;

return $rv;
}

# Make sure blobs are not bound as placeholders, and return any non-empty ones
# as a hash.
sub _remove_blob_cols {
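As a side note, the localized-glob trick used above to neutralize $sth->finish can be seen in isolation in the following toy script. Toy::Handle is an invented package, not part of DBIC or DBD::Sybase; the point is only that the override is scoped to the block and the real method is restored afterwards.

    use strict;
    use warnings;

    package Toy::Handle;
    sub new    { bless {}, shift }
    sub finish { print "real finish called\n" }

    package main;

    my $sth = Toy::Handle->new;

    {
        # same pattern as in insert_bulk: temporarily replace the method in the
        # handle's class so any finish() call inside this scope is a no-op
        no warnings 'redefine';
        no strict 'refs';
        local *{ ref($sth) . '::finish' } = sub { print "finish suppressed\n" };

        $sth->finish;    # prints "finish suppressed"
    }

    $sth->finish;        # prints "real finish called" - original is back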
44 changes: 32 additions & 12 deletions t/746sybase.t
@@ -8,20 +8,39 @@ use DBIx::Class::Optional::Dependencies ();
use lib qw(t/lib);
use DBICTest;

my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
if (not ($dsn && $user)) {
plan skip_all => join ' ',
'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test.',
'Warning: This test drops and creates the tables:',
"'artist', 'money_test' and 'bindtype_test'",
;
};

plan skip_all => 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_rdbms_ase')
unless DBIx::Class::Optional::Dependencies->req_ok_for ('test_rdbms_ase');

my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/};
# first run the test without the lang variable set
# it is important to do this before module load, hence
# the subprocess before the optdep check
if ($ENV{LANG} and $ENV{LANG} ne 'C') {
my $oldlang = $ENV{LANG};
local $ENV{LANG} = 'C';

my $TESTS = 66 + 2;
pass ("Your lang is set to $oldlang - testing with C first");

if (not ($dsn && $user)) {
plan skip_all =>
'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test' .
"\nWarning: This test drops and creates the tables " .
"'artist', 'money_test' and 'bindtype_test'";
} else {
plan tests => $TESTS*2 + 1;
my @cmd = ($^X, __FILE__);

# this is cheating, and may even hang here and there (testing on windows passed fine)
# will be replaced with Test::SubExec::Noninteractive in due course
require IPC::Open2;
IPC::Open2::open2(my $out, my $in, @cmd);
while (my $ln = <$out>) {
print " $ln";
}

wait;
ok (! $?, "Wstat $? from: @cmd");
}

my @storage_types = (
@@ -63,9 +82,8 @@ for my $storage_type (@storage_types) {

if ($storage_idx == 0 &&
$schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::ASE::NoBindVars')) {
# no placeholders in this version of Sybase or DBD::Sybase (or using FreeTDS)
my $tb = Test::More->builder;
$tb->skip('no placeholders') for 1..$TESTS;
# no placeholders in this version of Sybase or DBD::Sybase (or using FreeTDS)
skip "Skipping entire test for $storage_type - no placeholder support", 1;
next;
}

@@ -612,6 +630,8 @@

is $ping_count, 0, 'no pings';

done_testing;

# clean up our mess
END {
if (my $dbh = eval { $schema->storage->_dbh }) {
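Finally, the subprocess dance added at the top of t/746sybase.t is easier to read in isolation. Below is a simplified sketch of the pattern, with the TAP bookkeeping trimmed; it is an illustration only, not the verbatim test code.

    use strict;
    use warnings;

    # Before any DBIC module is loaded, re-run this very file in a child
    # process with LANG=C, so the suite is exercised under both locales.
    if ( $ENV{LANG} and $ENV{LANG} ne 'C' ) {
        local $ENV{LANG} = 'C';
        my @cmd = ( $^X, __FILE__ );

        require IPC::Open2;
        my $pid = IPC::Open2::open2( my $out, my $in, @cmd );

        # relay the child's output, indented, then check its exit status
        while ( my $ln = <$out> ) {
            print "  $ln";
        }
        waitpid $pid, 0;
        die "LANG=C re-run of @cmd failed (wstat $?)\n" if $?;
    }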
