diff --git a/META.yml b/META.yml index 96a0f72..83d9ad0 100644 --- a/META.yml +++ b/META.yml @@ -39,10 +39,8 @@ requires: DateTime::Format::SQLite: 0 Digest::MD5: 0 Filter::Util::Call: 0 - Hash::Merge: 0.12 List::MoreUtils: 0 Log::Any: 0 - Module::Find: 0 Params::Validate: 0 Scalar::Util: 0 Sub::Exporter: 0 diff --git a/Makefile.PL b/Makefile.PL index fbacf33..69a3a31 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -21,7 +21,6 @@ requires 'Class::MOP'; requires 'Class::Inspector'; requires 'Class::Data::Inheritable'; requires 'Data::Page' => 2.00; -requires 'Hash::Merge' => 0.12; requires 'Params::Validate'; requires 'Log::Any'; requires 'Digest::MD5'; @@ -31,7 +30,6 @@ requires 'DateTime'; requires 'Sub::Exporter'; requires 'Capture::Tiny'; requires 'Data::Dump'; -requires 'Module::Find'; requires 'Cache::LRU'; build_requires 'Test::More' => 0.88; diff --git a/lib/DBIx/ObjectMapper/Engine.pm b/lib/DBIx/ObjectMapper/Engine.pm index 243b016..906de75 100644 --- a/lib/DBIx/ObjectMapper/Engine.pm +++ b/lib/DBIx/ObjectMapper/Engine.pm @@ -35,6 +35,7 @@ sub update { } sub insert { } sub create { } sub delete { } +sub union { } sub iterator { } 1; diff --git a/lib/DBIx/ObjectMapper/Engine/DBI.pm b/lib/DBIx/ObjectMapper/Engine/DBI.pm index 979bf3e..a3d721c 100644 --- a/lib/DBIx/ObjectMapper/Engine/DBI.pm +++ b/lib/DBIx/ObjectMapper/Engine/DBI.pm @@ -21,6 +21,7 @@ sub _init { my $param = shift || confess 'invalid parameter.'; $param = [ $param, @_ ] unless ref $param; + my $external_dbh; my @connect_info; my $option; if ( ref $param eq 'ARRAY' ) { @@ -28,6 +29,7 @@ sub _init { $option = $param->[3] if $param->[3]; } elsif ( ref $param eq 'HASH' ) { + $external_dbh = delete $param->{external_dbh}; @connect_info = ( delete $param->{dsn}, delete $param->{username}, @@ -47,7 +49,7 @@ sub _init { = ref $disconnect_do eq 'ARRAY' ? $disconnect_do : [$disconnect_do]; for my $name ( qw(db_schema namesep quote datetime_parser iterator - time_zone disable_prepare_caching cache) ) { + time_zone disable_prepare_caching cache connect_identifier) ) { $self->{$name} = delete $option->{$name} || undef; } @@ -60,6 +62,7 @@ sub _init { %{ $option || {} } }; + $self->{external_dbh} = $external_dbh; $self->{connect_info} = \@connect_info; $self->{driver_type} = undef; $self->{driver} = undef; @@ -156,11 +159,16 @@ sub _connect { my $self = shift; my $dbh = do { - if ($INC{'Apache/DBI.pm'} && $ENV{MOD_PERL}) { - local $DBI::connect_via = 'connect'; # Disable Apache::DBI. - DBI->connect( @{ $self->{connect_info} } ); - } else { - DBI->connect( @{ $self->{connect_info} } ); + if ($self->{external_dbh}) { + $self->{external_dbh}; + } + else { + if ($INC{'Apache/DBI.pm'} && $ENV{MOD_PERL}) { + local $DBI::connect_via = 'connect'; # Disable Apache::DBI. 
+ DBI->connect( @{ $self->{connect_info} } ); + } else { + DBI->connect( @{ $self->{connect_info} } ); + } } }; @@ -175,13 +183,14 @@ sub _connect { $self->{driver} = DBIx::ObjectMapper::Engine::DBI::Driver->new( $driver_type, $dbh, - db_schema => $self->{db_schema} || undef, - namesep => $self->{namesep} || undef, - quote => $self->{quote} || undef, - query => $self->query, - log => $self->log, - datetime_parser => $self->{datetime_parser} || undef, - time_zone => $self->{time_zone}, + db_schema => $self->{db_schema} || undef, + connect_identifier => $self->{connect_identifier} || undef, + namesep => $self->{namesep} || undef, + quote => $self->{quote} || undef, + query => $self->query, + log => $self->log, + datetime_parser => $self->{datetime_parser} || undef, + time_zone => $self->{time_zone}, ); if ( $self->{time_zone} @@ -200,10 +209,17 @@ sub _connect { sub disconnect { my ($self) = @_; - if( my $dbh = $self->{_dbh} ) { + my $dbh = $self->{_dbh}; + if ( + $dbh && + ( + !defined($self->{external_dbh}) || + $self->{external_dbh} != $dbh + ) + ) { $self->dbh_do( $self->{disconnect_do}, $dbh ); while( $self->{txn_depth} > 0 ) { - $self->_txn_rollback; + $self->txn_rollback; } $dbh->disconnect; $self->log_connect('DISCONNECT'); @@ -466,7 +482,8 @@ sub select_single { else { #$result = $self->dbh->selectrow_arrayref($sql, +{}, @bind); my $sth = $self->_prepare($sql); - $sth->execute(@bind) || confess $sth->errstr; + my @raw_bind = $self->driver->bind_params($sth, @bind); + $sth->execute(@raw_bind) || confess $sth->errstr; $result = $sth->fetchrow_arrayref; $sth->finish; $self->{sql_cnt}++; @@ -505,7 +522,9 @@ sub update { my ( $sql, @bind ) = $query->as_sql; $self->log_sql($sql, @bind); - my $ret = $self->dbh->do($sql, {}, @bind); + my $sth = $self->dbh->prepare($sql); + my @raw_bind = $self->driver->bind_params($sth, @bind); + my $ret = $sth->execute(@raw_bind); $self->{sql_cnt}++; return $ret; } @@ -522,7 +541,9 @@ sub insert { my ( $sql, @bind ) = $query->as_sql; $self->log_sql($sql, @bind); - $self->dbh->do( $sql, {}, @bind ); + my $sth = $self->dbh->prepare($sql); + my @raw_bind = $self->driver->bind_params($sth, @bind); + $sth->execute(@raw_bind); $self->{sql_cnt}++; my $ret_id = ref($query->values) eq 'HASH' ? $query->values : +{}; @@ -565,6 +586,16 @@ sub delete { return $ret; } +sub union { + my ( $self, $query, $callback ) = @_; + my $query_class = ref( $self->query ) . 
'::Union'; + unless ( ref $query eq $query_class ) { + $query = $self->_as_query_object( 'union', $query ); + } + return $self->iterator->new( $query, $self, $callback ); +} + + # XXXX TODO CREATE TABLE # sub create { } @@ -671,6 +702,7 @@ DBIx::ObjectMapper::Engine::DBI - the DBI engine on_connect_do => [], on_disconnect_do => [], db_schema => 'public', + connect_identifier => undef, namesep => '.', quote => '"', iterator => '', @@ -692,6 +724,7 @@ DBIx::ObjectMapper::Engine::DBI - the DBI engine on_connect_do => [], on_disconnect_do => [], db_schema => 'public', + connect_identifier => undef, namesep => '.', quote => '"', iterator => '', diff --git a/lib/DBIx/ObjectMapper/Engine/DBI/BoundParam.pm b/lib/DBIx/ObjectMapper/Engine/DBI/BoundParam.pm new file mode 100644 index 0000000..7748c69 --- /dev/null +++ b/lib/DBIx/ObjectMapper/Engine/DBI/BoundParam.pm @@ -0,0 +1,23 @@ +package DBIx::ObjectMapper::Engine::DBI::BoundParam; +use strict; +use warnings; +use Carp::Clan qw/^DBIx::ObjectMapper/; +use Params::Validate qw(:all); + +my $ATTRIBUTES = { + value => { type => SCALAR }, + type => { type => SCALAR }, + column => { type => SCALAR }, +}; + +sub new { + my $class = shift; + my %attr = validate( @_, $ATTRIBUTES ); + return bless \%attr, $class; +} + +sub value { $_[0]->{value} } +sub type { $_[0]->{type} } +sub column { $_[0]->{column} } + +1; diff --git a/lib/DBIx/ObjectMapper/Engine/DBI/Driver.pm b/lib/DBIx/ObjectMapper/Engine/DBI/Driver.pm index 3b26aa3..56dc052 100644 --- a/lib/DBIx/ObjectMapper/Engine/DBI/Driver.pm +++ b/lib/DBIx/ObjectMapper/Engine/DBI/Driver.pm @@ -36,6 +36,7 @@ sub log { $_[0]->{log} } sub namesep { $_[0]->{namesep} } sub quote { $_[0]->{quote} } sub time_zone { $_[0]->{time_zone} } +sub connect_identifier { $_[0]->{connect_identifier} } sub init { } @@ -229,4 +230,77 @@ sub release_savepoint {} sub rollback_savepoint {} +sub bind_params { my ($self, $sth, @binds) = @_; @binds } + +sub _type_map_data {{ + # String + 'varchar' => 'String', + 'character varying' => 'String', + 'char' => 'String', + 'character' => 'String', + + # Int + 'int' => 'Int', + 'integer' => 'Int', + 'mediumint' => 'Int', + + # SmallInt + 'tinyint' => 'SmallInt', + 'smallint' => 'SmallInt', + + # BigInt + 'bigint' => 'BigInt', + + # Boolean + 'boolean' => 'Boolean', + 'bool' => 'Boolean', + + # Text + 'text' => 'Text', + + # Date + 'date' => 'Date', + + # DateTime + 'datetime' => 'Datetime', + 'timestamp' => 'Datetime', + 'timestamp without time zone' => 'Datetime', + 'timestamp with time zone' => 'Datetime', + + # Time + 'time' => 'Time', + 'time without time zone' => 'Time', + 'time with time zone' => 'Time', + + # Interval + 'interval' => 'Interval', + + # float + 'float' => 'Float', + 'real' => 'Float', + 'double precision' => 'Float', + 'double' => 'Float', + + # Numeric + 'numeric' => 'Numeric', + 'decimal' => 'Numeric', + 'dec' => 'Numeric', + 'money' => 'Numeric', + + # Blob + 'blob' => 'Binary', + 'bytea' => 'Binary', + 'longblob' => 'Binary', + + # Bit + 'bit' => 'Bit', + 'bit varying' => 'Bit', +}} + +sub type_map { + my $class = shift; + my $type = shift; + return $class->_type_map_data->{$type}; +} + 1; diff --git a/lib/DBIx/ObjectMapper/Engine/DBI/Driver/Oracle.pm b/lib/DBIx/ObjectMapper/Engine/DBI/Driver/Oracle.pm new file mode 100644 index 0000000..aecc9cd --- /dev/null +++ b/lib/DBIx/ObjectMapper/Engine/DBI/Driver/Oracle.pm @@ -0,0 +1,320 @@ +package DBIx::ObjectMapper::Engine::DBI::Driver::Oracle; +use strict; +use warnings; +use Try::Tiny; +use Carp::Clan 
qw/^DBIx::ObjectMapper/; +use base qw(DBIx::ObjectMapper::Engine::DBI::Driver); +use DBIx::ObjectMapper::Engine::DBI::BoundParam; +use DBD::Oracle qw(:ora_types); +use Scalar::Util qw(blessed); + +sub init { + my $self = shift; + my $dbh = shift; + + try { + require DateTime::Format::Oracle; + DateTime::Format::Oracle->import; + $self->{datetime_parser} ||= 'DateTime::Format::Oracle'; + } catch { + confess "Couldn't load DateTime::Format::Oracle: $_"; + }; + + $self->{db_schema} ||= do { + my $system_context_row_ref = $dbh->selectall_arrayref("select sys_context( 'userenv', 'current_schema' ) from dual"); + $system_context_row_ref->[0][0]; + }; + + $self->{_cache}->{_oracle} = { + primary_keys => {}, + foreign_keys => {}, + unique_info => {}, + }; + $self->{namesep} = q{.}; + $self->{quote} = q{'}; +} + +sub last_insert_id {} + +sub get_primary_key { + my ($self, $dbh, $table) = @_; + if (!$self->{_cache}->{_oracle}->{primary_keys}->{$table}) { + $self->{_cache}->{_oracle}->{primary_keys}->{$table} = + #+[keys %{$dbh->primary_key_info('', $self->db_schema, $table)->fetchall_hashref('COLUMN_NAME')}]; + +[keys %{$self->_primary_key_info($dbh, '', $self->db_schema, $table)->fetchall_hashref('COLUMN_NAME')}]; + } + return @{$self->{_cache}->{_oracle}->{primary_keys}->{$table}}; +} + +sub get_table_uniq_info { + my ($self, $dbh, $table) = @_; + if (!$self->{_cache}->{_oracle}->{unique_info}->{$table}) { + my $sth = $dbh->prepare(q{ + select ai.index_name, aic.column_name + from all_indexes ai + join all_ind_columns aic + on aic.index_name = ai.index_name + and aic.index_owner = ai.owner + where ai.uniqueness = 'UNIQUE' + and aic.table_name = ? + and aic.index_owner = ? + }); + $sth->execute($table, $self->db_schema); + + my $unique_rows = $sth->fetchall_arrayref(); + my %unique_constraints = map { + $_->[0] => [] + } @$unique_rows; + for my $row (@$unique_rows) { + push @{$unique_constraints{$row->[0]}}, $row->[1]; + } + + $self->{_cache}->{_oracle}->{unique_info}->{$table} = [ + map { + [$_ => $unique_constraints{$_}] + } keys %unique_constraints + ]; + } + + return $self->{_cache}->{_oracle}->{unique_info}->{$table}; +} + +sub get_table_fk_info { + my ($self, $dbh, $table) = @_; + + if (!$self->{_cache}->{_oracle}->{foreign_keys}->{$table}) { + $self->{_cache}->{_oracle}->{foreign_keys}->{$table} = $self->_foreign_key_info($dbh,undef, undef, undef, '', $self->db_schema, $table); + } + + return $self->{_cache}->{_oracle}->{foreign_keys}->{$table}; +} + +sub get_tables { + my ( $self, $dbh ) = @_; + return $self->_truncate_quote_and_sep( + sort {$a cmp $b} + grep { $_ !~ /\.BIN\$/ } + map {$_ =~ s/"//g; $_} + ( + $dbh->tables(undef, $self->db_schema, undef, 'TABLE'), + $dbh->tables(undef, $self->db_schema, undef, 'VIEW') + ) + ); +} + +sub set_time_zone_query { + my ( $self, $dbh ) = @_; + my $tz = $self->time_zone || return; + return "ALTER SESSION SET time_zone = " . 
$dbh->quote($tz); +} + +sub set_savepoint { + my ($self, $dbh, $name) = @_; + my $quoted_name = $dbh->quote($name); + $dbh->do("SAVEPOINT $quoted_name"); +} + +sub release_savepoint {} + +sub rollback_savepoint { + my ($self, $dbh, $name) = @_; + my $quoted_name = $dbh->quote($name); + $dbh->do("ROLLBACK TO $quoted_name"); +} + +sub bind_params { + my ($self, $sth, @binds) = @_; + my $bind_position = 0; + + return map { + my $bind = $_; + $bind_position++; + + if (ref $bind && blessed($bind) && $bind->isa('DBIx::ObjectMapper::Engine::DBI::BoundParam')) { + if ($bind->type eq 'binary') { + $sth->bind_param( + $bind_position, + undef, + { + ora_type => SQLT_BIN, + ora_field => $bind->column + } + ); + } + else { + confess 'Unknown type for a bound param: ' . $bind->type; + } + $bind->value; + } + else { + $bind; + } + } @binds; +} + +sub _type_map_data { + my $class = shift; + my $map = $class->SUPER::_type_map_data(@_); + $map->{number} = 'Numeric'; + $map->{blob} = 'Blob'; + $map->{long} = 'Binary'; + $map->{varchar2} = 'Text'; + return $map; +} + +sub type_map { + my $class = shift; + my $type = shift; + $type =~ s/\(.*$//; + return $class->_type_map_data->{$type}; +} + +###### DBD::Oracle override below because DBD::Oracle is inefficient for large schemas +sub _primary_key_info { + my($self, $dbh, $catalog, $schema, $table) = @_; + if (ref $catalog eq 'HASH') { + ($schema, $table) = @$catalog{'TABLE_SCHEM','TABLE_NAME'}; + $catalog = undef; + } + my $SQL = <<'SQL'; +SELECT * + FROM +( + SELECT /*+ CHOOSE */ + NULL TABLE_CAT + , c.OWNER TABLE_SCHEM + , c.TABLE_NAME TABLE_NAME + , c.COLUMN_NAME COLUMN_NAME + , c.POSITION KEY_SEQ + , c.CONSTRAINT_NAME PK_NAME + FROM ALL_CONSTRAINTS p + , ALL_CONS_COLUMNS c + WHERE p.OWNER = c.OWNER + AND p.TABLE_NAME = c.TABLE_NAME + AND p.CONSTRAINT_NAME = c.CONSTRAINT_NAME + AND p.CONSTRAINT_TYPE = 'P' +) + WHERE TABLE_SCHEM = ? + AND TABLE_NAME = ? + ORDER BY TABLE_SCHEM, TABLE_NAME, KEY_SEQ +SQL +#warn "@_\n$Sql ($schema, $table)"; + my $sth = $dbh->prepare($SQL) or return undef; + $sth->execute($schema, $table) or return undef; + $sth; +} + +sub build_constraint_cache { + my ($self, $dbh, $attr) = @_; + my $cache = {}; + my $sth; + + $sth = $dbh->prepare(q{ + select OWNER, R_OWNER, TABLE_NAME, CONSTRAINT_NAME, R_CONSTRAINT_NAME, CONSTRAINT_TYPE + from ALL_CONSTRAINTS + where CONSTRAINT_TYPE in ('P','U', 'R') + }) or return undef; + $sth->execute() or return undef; + $cache->{constraints} = [map { + +{ + OWNER => $_->[0], + R_OWNER => $_->[1], + TABLE_NAME => $_->[2], + CONSTRAINT_NAME => $_->[3], + R_CONSTRAINT_NAME => $_->[4], + CONSTRAINT_TYPE => $_->[5], + } + } + @{$sth->fetchall_arrayref}]; + + $sth = $dbh->prepare(q{ + select OWNER, CONSTRAINT_NAME, COLUMN_NAME, POSITION + from ALL_CONS_COLUMNS + order by POSITION + }) or return undef; + $sth->execute() or return undef; + $cache->{columns} = [map { + +{ + OWNER => $_->[0], + CONSTRAINT_NAME => $_->[1], + COLUMN_NAME => $_->[2], + POSITION => $_->[3], + } + } + @{$sth->fetchall_arrayref}]; + + $self->{_constraint_cache} = $cache; + return 1; +} + +sub _foreign_key_info { + my $self = shift; + my $dbh = shift; + my $attr = ( ref $_[0] eq 'HASH') ? 
$_[0] : { + 'UK_TABLE_SCHEM' => $_[1],'UK_TABLE_NAME' => $_[2] + ,'FK_TABLE_SCHEM' => $_[4],'FK_TABLE_NAME' => $_[5] }; + + if (!$self->{_constraint_cache}) { + return unless $self->build_constraint_cache($dbh, $attr); + } + + my @constraints = grep { + ($_->{CONSTRAINT_TYPE} eq 'R') && + ($_->{R_CONSTRAINT_NAME}) && + ($_->{OWNER} eq $attr->{FK_TABLE_SCHEM}) && + ($_->{TABLE_NAME} eq $attr->{FK_TABLE_NAME}) + } + @{$self->{_constraint_cache}->{constraints}}; + + for my $constraint (@constraints) { + next if $constraint->{columns}; + + my ($foreign_table) = map { $_->{TABLE_NAME} } + grep { + ($_->{CONSTRAINT_TYPE} eq 'P' || $_->{CONSTRAINT_TYPE} eq 'U') && + $_->{OWNER} eq $constraint->{R_OWNER} && + $_->{CONSTRAINT_NAME} eq $constraint->{R_CONSTRAINT_NAME} && + ($attr->{UK_TABLE_NAME} ? ($_->{TABLE_NAME} eq $attr->{UK_TABLE_NAME}) : 1) + } @{$self->{_constraint_cache}->{constraints}}; + + my @fk_columns = sort {$a->{POSITION} <=> $b->{POSITION}} + map { + +{ + foreign_table => $foreign_table, + %$_ + } + } + grep { + $_->{OWNER} eq $attr->{FK_TABLE_SCHEM} && + $_->{CONSTRAINT_NAME} eq $constraint->{CONSTRAINT_NAME} + } + @{$self->{_constraint_cache}->{columns}}; + + for my $fk_column (@fk_columns) { + next if $fk_column->{foreign_column}; + ($fk_column->{foreign_column}) = map { $_->{COLUMN_NAME} } + grep { + $_->{OWNER} eq $constraint->{R_OWNER} && + $_->{CONSTRAINT_NAME} eq $constraint->{R_CONSTRAINT_NAME} && + $_->{POSITION} eq $fk_column->{POSITION} + } + @{$self->{_constraint_cache}->{columns}}; + } + + $constraint->{columns} = \@fk_columns; + } + + my @final_constraints = map { + my $constraint = $_; + +{ + table => $constraint->{columns}[0]->{foreign_table}, + keys => [map {$_->{COLUMN_NAME}} @{$constraint->{columns}}], + refs => [map {$_->{foreign_column}} @{$constraint->{columns}}], + } + } + @constraints; + + return \@final_constraints; +} + +1; diff --git a/lib/DBIx/ObjectMapper/Engine/DBI/Driver/Pg.pm b/lib/DBIx/ObjectMapper/Engine/DBI/Driver/Pg.pm index e4eeaae..2fd6c4b 100644 --- a/lib/DBIx/ObjectMapper/Engine/DBI/Driver/Pg.pm +++ b/lib/DBIx/ObjectMapper/Engine/DBI/Driver/Pg.pm @@ -4,6 +4,8 @@ use warnings; use Carp::Clan qw/^DBIx::ObjectMapper/; use Try::Tiny; use base qw(DBIx::ObjectMapper::Engine::DBI::Driver); +use Scalar::Util qw(blessed); +use DBD::Pg qw(:pg_types); sub init { my $self = shift; @@ -127,4 +129,38 @@ sub rollback_savepoint { $dbh->pg_rollback_to($name); } +sub bind_params { + my ($self, $sth, @binds) = @_; + my $bind_position = 0; + + return map { + my $bind = $_; + $bind_position++; + + if (ref $bind && blessed($bind) && $bind->isa('DBIx::ObjectMapper::Engine::DBI::BoundParam')) { + if ($bind->type eq 'binary') { + $sth->bind_param( + $bind_position, + undef, + { pg_type => DBD::Pg::PG_BYTEA } + ); + } + else { + confess 'Unknown type for a bound param: ' . 
$bind->type; + } + $bind->value; + } + else { + $bind; + } + } @binds; +} + +sub _type_map_data { + my $class = shift; + my $map = $class->SUPER::_type_map_data(@_); + $map->{bytea} = 'ByteA'; + return $map; +} + 1; diff --git a/lib/DBIx/ObjectMapper/Engine/DBI/Iterator.pm b/lib/DBIx/ObjectMapper/Engine/DBI/Iterator.pm index 9d7d8b2..7447a9b 100644 --- a/lib/DBIx/ObjectMapper/Engine/DBI/Iterator.pm +++ b/lib/DBIx/ObjectMapper/Engine/DBI/Iterator.pm @@ -37,9 +37,10 @@ sub sth { { my ( $sql, @bind ) = $self->query->as_sql; my $sth = $self->engine->_prepare($sql); - $sth->execute(@bind) or confess $sth->errstr; + my @raw_bind = $self->engine->driver->bind_params($sth, @bind); + $sth->execute(@raw_bind) or confess $sth->errstr; $self->engine->{sql_cnt}++; - $self->engine->log_sql($sql, @bind); + $self->engine->log_sql($sql, @raw_bind); my $size = $sth->rows; # unless( $size ) { diff --git a/lib/DBIx/ObjectMapper/Mapper.pm b/lib/DBIx/ObjectMapper/Mapper.pm index 60ae10e..4da06eb 100644 --- a/lib/DBIx/ObjectMapper/Mapper.pm +++ b/lib/DBIx/ObjectMapper/Mapper.pm @@ -165,7 +165,7 @@ sub new { elsif( $option->{polymorphic_on} and $option->{polymorphic_identity} ) { $option->{default_condition} = [ $option->{table}->c( $option->{polymorphic_on} ) - == $option->{polymorphic_identity} ]; + eq $option->{polymorphic_identity} ]; $option->{default_value}->{$option->{polymorphic_on}} = $option->{polymorphic_identity}; diff --git a/lib/DBIx/ObjectMapper/Mapper/Attribute/Property.pm b/lib/DBIx/ObjectMapper/Mapper/Attribute/Property.pm index 9c4fe9f..181ddcd 100644 --- a/lib/DBIx/ObjectMapper/Mapper/Attribute/Property.pm +++ b/lib/DBIx/ObjectMapper/Mapper/Attribute/Property.pm @@ -27,12 +27,15 @@ sub new { } ); - bless \%prop, $class; + my $self = bless \%prop, $class; + $self->{is_column} = $self->type eq 'column'; + return $self; } -sub lazy { $_[0]->{lazy} } -sub getter { $_[0]->{getter} } -sub setter { $_[0]->{setter} } +sub lazy {$_[0]->{lazy}} +sub getter {$_[0]->{getter}} +sub setter {$_[0]->{setter}} +sub is_column {$_[0]->{is_column}} ## proxy diff --git a/lib/DBIx/ObjectMapper/Mapper/Instance.pm b/lib/DBIx/ObjectMapper/Mapper/Instance.pm index 0045587..c1df979 100644 --- a/lib/DBIx/ObjectMapper/Mapper/Instance.pm +++ b/lib/DBIx/ObjectMapper/Mapper/Instance.pm @@ -222,7 +222,7 @@ sub reducing { my %primary_key = map { $_ => 1 } @{$class_mapper->table->primary_key}; for my $prop_name ( $class_mapper->attributes->property_names ) { my $prop = $class_mapper->attributes->property_info($prop_name); - next unless $prop->type eq 'column'; + next unless $prop->is_column; my $col_name = $prop->name; my $val = $self->get_val($prop_name); next if $primary_key{$col_name} and !defined $val; @@ -286,7 +286,7 @@ sub _modify { my $col = $prop->name || $prop_name; if( exists $rdata->{$col} ) { - if( defined $rdata->{$col} and $prop->type eq 'column' ) { + if( defined $rdata->{$col} and $prop->is_column ) { $rdata->{$col} = $prop->{isa}->from_storage( $rdata->{$col} ); } $self->set_val($prop_name => $rdata->{$col}); @@ -451,7 +451,7 @@ sub is_modified { my $prop = $class_mapper->attributes->property_info($prop_name); my $col = $prop->name || $prop_name; my $val = $self->get_val($prop_name); - next unless $prop->type eq 'column' and ref $val; + next unless $prop->is_column and ref $val; if( $self->unit_of_work->change_checker->is_changed( $val ) ) { $modified_data->{$prop_name} = $val; $is_modified = 1; diff --git a/lib/DBIx/ObjectMapper/Metadata.pm b/lib/DBIx/ObjectMapper/Metadata.pm index e25dbf3..be7bef5 100644 --- 
a/lib/DBIx/ObjectMapper/Metadata.pm +++ b/lib/DBIx/ObjectMapper/Metadata.pm @@ -54,6 +54,11 @@ sub table { *t = \&table; +sub all_known_tables { + my $self = shift; + return values %{$self->{tables}}; +} + sub autoload_all_tables { my $self = shift; my $engine = $self->engine; @@ -83,6 +88,7 @@ sub select { $_[0]->query_object->select } sub insert { $_[0]->query_object->insert } sub delete { $_[0]->query_object->delete } sub update { $_[0]->query_object->update } +sub union { $_[0]->query_object->union } 1; diff --git a/lib/DBIx/ObjectMapper/Metadata/Sugar.pm b/lib/DBIx/ObjectMapper/Metadata/Sugar.pm index cc37b15..08689b7 100644 --- a/lib/DBIx/ObjectMapper/Metadata/Sugar.pm +++ b/lib/DBIx/ObjectMapper/Metadata/Sugar.pm @@ -2,7 +2,6 @@ package DBIx::ObjectMapper::Metadata::Sugar; use strict; use warnings; use Sub::Exporter; -use Module::Find; use DBIx::ObjectMapper::Utils; our @ATTRS = qw(Col PrimaryKey NotNull OnUpdate Default ToStorage Unique @@ -37,10 +36,33 @@ sub ServerDefault { server_default => $_[0] } sub ForeignKey { foreign_key => [ $_[0] => $_[1] ] } sub ServerCheck { server_check => $_[0] } -our @TYPES; +our @TYPES = qw( + Array + BigInt + Binary + Bit + Blob + Boolean + ByteA + Date + Datetime + Float + Int + Interval + Mush + Numeric + SmallInt + String + Text + Time + Undef + Uri + Yaml +); + { my $namespace = 'DBIx::ObjectMapper::Metadata::Table::Column::Type'; - my @type_classes = Module::Find::findallmod($namespace); + my @type_classes = map { "${namespace}::$_" } @TYPES; my $pkg = __PACKAGE__; for my $type_class ( @type_classes ) { @@ -49,7 +71,6 @@ our @TYPES; $name =~ s/^$namespace\:://; no strict 'refs'; *{"$pkg\::$name"} = sub { type => $type_class->new(@_) }; - push @TYPES, $name; } }; diff --git a/lib/DBIx/ObjectMapper/Metadata/Table.pm b/lib/DBIx/ObjectMapper/Metadata/Table.pm index 427c8d9..c867c44 100644 --- a/lib/DBIx/ObjectMapper/Metadata/Table.pm +++ b/lib/DBIx/ObjectMapper/Metadata/Table.pm @@ -6,7 +6,18 @@ use overload '""' => sub { my $self = shift; my $table_name = $self->table_name; - $table_name .= ' AS ' . $self->alias_name if $self->is_clone; + + my ($connect_identifier) = map {$_->driver->connect_identifier} grep {$_} $self->engine; + if ($connect_identifier) { + $table_name .= '@' . $connect_identifier; + } + + my $as_to_sql = ' AS '; + if ($self->engine && $self->engine->driver_type eq 'Oracle') { + $as_to_sql = ' '; + } + + $table_name .= $as_to_sql . $self->alias_name if $self->is_clone; return $table_name; }, fallback => 1 @@ -31,7 +42,7 @@ sub new { my ( $table_name, $column, $param ) = validate_pos( @_, { type => SCALAR|ARRAYREF }, - { type => ARRAYREF|SCALAR }, + { type => ARRAYREF|HASHREF|SCALAR }, { type => HASHREF, optional => 1 }, ); @@ -79,6 +90,14 @@ sub new { $self->autoload(); $column = []; } + elsif ($column and ref($column) and ref($column) eq 'HASH') { + $self->{autoload} = 1; + $self->autoload_data( + $self->engine ? 
(driver => $self->engine->driver) : (), + %$column + ); + $column = []; + } $self->column( $column ); @@ -354,6 +373,57 @@ sub validation { $class->__hash_accessor('validation', @_); } +sub autoloaded_data { + sub column_to_autoload_data { + my $column = shift; + return { + name => $column->name, + type => $column->type->realtype, + size => $column->type->size, + is_nullable => $column->is_nullable, + default => $column->server_default, + }; + } + + my $self = shift; + return { + primary_key => $self->primary_key, + unique_key => $self->unique_key, + foreign_key => $self->foreign_key, + column_info => [ map {column_to_autoload_data($_)} @{$self->columns} ], + }; +} + +sub autoload_data { + my $self = shift; + my %data = @_; + + my $primary_key = $data{primary_key} || []; + my $unique_key = $data{unique_key} || []; + my $foreign_key = $data{foreign_key} || []; + my $column_info = $data{column_info} || []; + my $driver = $data{driver}; + + confess "autoload_data needs driver." unless $driver; + + $self->{column_map} ||= +{}; + $self->primary_key($primary_key); + $self->unique_key($unique_key); + $self->foreign_key($foreign_key); + + for my $conf ( @$column_info ) { + my $translated_conf = {%$conf}; + my $type_class = DBIx::ObjectMapper::Metadata::Table::Column::TypeMap->get( + $translated_conf->{type}, + $driver, + ); + $translated_conf->{type} = $type_class->new(); + $translated_conf->{type}->size($conf->{size}); + $translated_conf->{type}->realtype($conf->{type}); + $translated_conf->{server_default} = delete $translated_conf->{default}; + $self->column( $translated_conf ); + } +} =head2 autoload @@ -365,29 +435,14 @@ sub autoload { confess "autoload needs engine." unless $self->engine; confess "autoload needs table_name" unless $self->table_name; - my $engine = $self->engine; - $self->{column_map} ||= +{}; - my @primary_key = $self->engine->get_primary_key( $self->table_name ); - $self->primary_key(\@primary_key); - - my $uniq_key = $self->engine->get_unique_key( $self->table_name ); - $self->unique_key($uniq_key); - - my $foreign_key = $self->engine->get_foreign_key( $self->table_name ); - $self->foreign_key($foreign_key); - - for my $conf ( @{$engine->get_column_info( $self->table_name )} ) { - my $type_class - = DBIx::ObjectMapper::Metadata::Table::Column::TypeMap->get( - $conf->{type} ); - my $realtype = $conf->{type}; - $conf->{type} = $type_class->new(); - $conf->{type}->size($conf->{size}); - $conf->{type}->realtype($realtype); - $conf->{server_default} = delete $conf->{default}; - $self->column( $conf ); - } + $self->autoload_data( + primary_key => [$self->engine->get_primary_key( $self->table_name )], + unique_key => $self->engine->get_unique_key( $self->table_name ), + foreign_key => $self->engine->get_foreign_key( $self->table_name ), + column_info => $self->engine->get_column_info( $self->table_name ), + driver => $self->engine->driver, + ); } sub column { @@ -437,7 +492,19 @@ sub _set_column { my $override_column = $self->column_map->{ $c->{name} }; if ( defined $override_column ) { my $org = $self->column( $c->{name} ); + my $overriding_type = $c->{type}; $c = DBIx::ObjectMapper::Utils::merge_hashref( { %$org }, $c ); + + # But we don't actually want a basic merge_hashref when it comes to + # the type object. We actually want to merge specified/overriden + # values, such as utf8. This is something even the old Hash::Merge + # package can't do exactly how we want. 
Therefore, we now carefully + # merge those: + for my $attribute (keys %$overriding_type) { + $c->{type}->{$attribute} = defined($overriding_type->{$attribute}) ? + $overriding_type->{$attribute} : + $org->{type}->{$attribute}; + } } my $name = delete $c->{name} || confess 'column name not found.'; @@ -524,7 +591,7 @@ sub _set_column { my $column_obj = $self->column_metaclass->new( name => $name, - table => $self->table_name, + table => $self->{table_name}, sep => $self->namesep, type => $c->{type} || undef, is_nullable => $c->{is_nullable}, @@ -861,7 +928,7 @@ sub clone { my @columns; for my $c ( @{$obj->columns} ) { my $new_col = $c->clone; - $new_col->{table} = $alias; + $new_col->{table} = [ $obj->table_name, $alias ]; push @columns, $new_col; } $obj->{columns} = \@columns; diff --git a/lib/DBIx/ObjectMapper/Metadata/Table/Column/Base.pm b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Base.pm index 2462103..2aa4212 100644 --- a/lib/DBIx/ObjectMapper/Metadata/Table/Column/Base.pm +++ b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Base.pm @@ -37,7 +37,7 @@ sub connc { my $ATTRIBUTES = { name => { type => SCALAR }, - table => { type => SCALAR }, + table => { type => SCALAR|ARRAYREF }, sep => { type => SCALAR }, type => { type => OBJECT, @@ -57,7 +57,20 @@ my $ATTRIBUTES = { }; sub name { $_[0]->{name} } -sub table { $_[0]->{table} } +sub table { + my $self = shift; + ref $self->{table} ? + $self->{table}->[1] || $self->{table}->[0] : + $self->{table} +} +sub table_name { + my $self = shift; + ref $self->{table} ? $self->{table}->[0] : $self->{table} +} +sub alias_name { + my $self = shift; + ref $self->{table} ? $self->{table}->[1] : undef; +} sub sep { $_[0]->{sep} } sub type { $_[0]->{type} } sub is_nullable { $_[0]->{is_nullable} } @@ -109,7 +122,11 @@ sub not_in { $self->op( 'NOT IN', \@values ); } -sub like { $_[0]->op( 'LIKE', $_[1]) } +sub like { + my ($self, $value, $escape_character) = @_; + return defined($escape_character) ? 
$self->op( 'LIKE', [$value, $escape_character] ) : + $self->op( 'LIKE', $value ); +} sub not_like { $_[0]->op( 'NOT LIKE', $_[1]) } @@ -179,7 +196,7 @@ sub _to_storage { if( defined $val and my $to_storage = $self->{to_storage} ) { $val = $to_storage->($val); } - return $self->type->to_storage($val); + return $self->type->to_storage($val, $self->name); } sub to_storage_on_update { diff --git a/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Binary.pm b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Binary.pm index 8577dd1..1c15c4d 100644 --- a/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Binary.pm +++ b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Binary.pm @@ -2,19 +2,5 @@ package DBIx::ObjectMapper::Metadata::Table::Column::Type::Binary; use strict; use warnings; use base qw(DBIx::ObjectMapper::Metadata::Table::Column::Type); -use DBI qw(:sql_types); - -sub set_engine_option { - my ( $self, $engine ) = @_; - $self->{escape_func} = $engine->driver->escape_binary_func($engine->dbh); -} - -sub escape_func { $_[0]->{escape_func} } - -sub to_storage { - my ( $self, $val ) = @_; - return $val unless defined $val; - return $self->escape_func->($val); -} 1; diff --git a/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Blob.pm b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Blob.pm new file mode 100644 index 0000000..7266e23 --- /dev/null +++ b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Blob.pm @@ -0,0 +1,17 @@ +package DBIx::ObjectMapper::Metadata::Table::Column::Type::Blob; +use strict; +use warnings; +use base qw(DBIx::ObjectMapper::Metadata::Table::Column::Type); +use DBIx::ObjectMapper::Engine::DBI::BoundParam; + +sub to_storage { + my ( $self, $val, $column_name ) = @_; + return $val unless defined $val; + DBIx::ObjectMapper::Engine::DBI::BoundParam->new( + value => $val, + type => 'binary', + column => $column_name, + ); +} + +1; diff --git a/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/ByteA.pm b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/ByteA.pm new file mode 100644 index 0000000..de132de --- /dev/null +++ b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/ByteA.pm @@ -0,0 +1,17 @@ +package DBIx::ObjectMapper::Metadata::Table::Column::Type::ByteA; +use strict; +use warnings; +use base qw(DBIx::ObjectMapper::Metadata::Table::Column::Type); +use DBIx::ObjectMapper::Engine::DBI::BoundParam; + +sub to_storage { + my ( $self, $val, $column_name ) = @_; + return $val unless defined $val; + DBIx::ObjectMapper::Engine::DBI::BoundParam->new( + value => $val, + type => 'binary', + column => $column_name, + ); +} + +1; diff --git a/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Datetime.pm b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Datetime.pm index 4c52051..c3140a7 100644 --- a/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Datetime.pm +++ b/lib/DBIx/ObjectMapper/Metadata/Table/Column/Type/Datetime.pm @@ -26,6 +26,7 @@ sub datetime_type { my $self = shift; my $real_type = $self->{realtype} || $self->default_type; $real_type =~ s/ /_/g; + $real_type =~ s/\(.*$//; return $real_type; } diff --git a/lib/DBIx/ObjectMapper/Metadata/Table/Column/TypeMap.pm b/lib/DBIx/ObjectMapper/Metadata/Table/Column/TypeMap.pm index 048424c..3fa05c3 100644 --- a/lib/DBIx/ObjectMapper/Metadata/Table/Column/TypeMap.pm +++ b/lib/DBIx/ObjectMapper/Metadata/Table/Column/TypeMap.pm @@ -3,84 +3,16 @@ use strict; use warnings; use DBIx::ObjectMapper::Utils; -our $map = { - - # String - 'varchar' => 'String', - 'character varying' => 'String', - 'char' => 'String', - 'character' => 
'String', - - # Int - 'int' => 'Int', - 'integer' => 'Int', - 'mediumint' => 'Int', - - # SmallInt - 'tinyint' => 'SmallInt', - 'smallint' => 'SmallInt', - - # BigInt - 'bigint' => 'BigInt', - - # Boolean - 'boolean' => 'Boolean', - 'bool' => 'Boolean', - - # Text - 'text' => 'Text', - - # Date - 'date' => 'Date', - - # DateTime - 'datetime' => 'Datetime', - 'timestamp' => 'Datetime', - 'timestamp without time zone' => 'Datetime', - 'timestamp with time zone' => 'Datetime', - - # Time - 'time' => 'Time', - 'time without time zone' => 'Time', - 'time with time zone' => 'Time', - - # Interval - 'interval' => 'Interval', - - # float - 'float' => 'Float', - 'real' => 'Float', - 'double precision' => 'Float', - 'double' => 'Float', - - # Numeric - 'numeric' => 'Numeric', - 'decimal' => 'Numeric', - 'dec' => 'Numeric', - 'money' => 'Numeric', - - # Blob - 'blob' => 'Binary', - 'bytea' => 'Binary', - 'longblob' => 'Binary', - - # Bit - 'bit' => 'Bit', - 'bit varying' => 'Bit', -}; - -# Array -our $is_array = qr/^.+\[\]$/; - sub get { - my $class = shift; - my $type = shift; - - my $prefix = 'DBIx::ObjectMapper::Metadata::Table::Column::Type::'; + my $class = shift; + my $type = shift; + my $driver = shift; + my $prefix = 'DBIx::ObjectMapper::Metadata::Table::Column::Type::'; + my $is_array = qr/^.+\[\]$/; my $type_class; - if( $map->{$type} ) { - $type_class = $prefix . $map->{$type}; + if( $driver->type_map($type) ) { + $type_class = $prefix . $driver->type_map($type); } elsif( $type =~ /$is_array/ ) { $type_class = $prefix . 'Array'; diff --git a/lib/DBIx/ObjectMapper/Query.pm b/lib/DBIx/ObjectMapper/Query.pm index 941b987..61c1a24 100644 --- a/lib/DBIx/ObjectMapper/Query.pm +++ b/lib/DBIx/ObjectMapper/Query.pm @@ -14,5 +14,6 @@ sub insert { DBIx::ObjectMapper::Query::Insert->new( shift->metadata, @_ ) } sub update { DBIx::ObjectMapper::Query::Update->new( shift->metadata, @_ ) } sub delete { DBIx::ObjectMapper::Query::Delete->new( shift->metadata, @_ ) } sub count { DBIx::ObjectMapper::Query::Count->new( shift->metadata, @_ ) } +sub union { DBIx::ObjectMapper::Query::Union->new( shift->metadata, @_ ) } 1; diff --git a/lib/DBIx/ObjectMapper/Query/Base.pm b/lib/DBIx/ObjectMapper/Query/Base.pm index 6f4f90e..94deb08 100644 --- a/lib/DBIx/ObjectMapper/Query/Base.pm +++ b/lib/DBIx/ObjectMapper/Query/Base.pm @@ -1,7 +1,7 @@ package DBIx::ObjectMapper::Query::Base; use strict; use warnings; -use Scalar::Util qw(weaken); +use Scalar::Util qw(weaken blessed); use Carp::Clan qw/^DBIx::ObjectMapper/; sub new { @@ -37,6 +37,57 @@ sub as_sql { return $self->builder->as_sql(@_); } +# By default, OM supports mutable query building. clone() allows us to extend +# queries without affecting the original queries. +sub clone { + my $self = shift; + my ($clone_hash, $clone_array, $clone_element); + + $clone_hash = sub { + my $hash = shift; + return { + map { + $_ => $clone_element->($hash->{$_}) + } keys %$hash + }; + }; + + $clone_array = sub { + my $array = shift; + return [ + map { + $clone_element->($_) + } @$array + ]; + }; + + $clone_element = sub { + my $element = shift; + + if (!ref $element || ref $element eq 'CODE') { + return $element; + } + elsif (blessed $element && $element->can('clone')) { + return $element->clone; + } + elsif (blessed $element) { + return $element; + } + elsif (ref $element eq 'HASH') { + return $clone_hash->($element); + } + elsif (ref $element eq 'ARRAY') { + return $clone_array->($element); + } + else { + use Data::Dumper; + die "I do not know how to clone: " . 
Dumper($element); + } + }; + + return bless($clone_hash->($self), blessed $self); +} + sub DESTROY { my $self = shift; warn "DESTROY $self" if $ENV{MAPPER_DEBUG}; diff --git a/lib/DBIx/ObjectMapper/Query/Select.pm b/lib/DBIx/ObjectMapper/Query/Select.pm index 427f2d3..37cfb81 100644 --- a/lib/DBIx/ObjectMapper/Query/Select.pm +++ b/lib/DBIx/ObjectMapper/Query/Select.pm @@ -18,6 +18,7 @@ sub new { for my $meth ( qw( column from where join order_by group_by limit offset having add_column add_where add_join + add_inner_join add_left_join add_order_by add_group_by add_having ) ) { *{"$pkg\::$meth"} = sub { my $self = shift; diff --git a/lib/DBIx/ObjectMapper/Query/Union.pm b/lib/DBIx/ObjectMapper/Query/Union.pm new file mode 100644 index 0000000..113f5a9 --- /dev/null +++ b/lib/DBIx/ObjectMapper/Query/Union.pm @@ -0,0 +1,58 @@ +package DBIx::ObjectMapper::Query::Union; +use strict; +use warnings; +use Carp::Clan qw/^DBIx::ObjectMapper/; +use Data::Page; +use base qw(DBIx::ObjectMapper::Query::Base); + +sub new { + my $class = shift; + my $self = $class->SUPER::new(@_); + $self->builder( $self->engine->query->union ); + return $self; +} + +{ + no strict 'refs'; + my $pkg = __PACKAGE__; + for my $meth ( qw( order_by group_by + having + limit offset + add_order_by add_group_by + add_having + sets add_sets ) ) { + *{"$pkg\::$meth"} = sub { + my $self = shift; + $self->builder->$meth(@_); + return $self; + }; + } +}; + + +sub pager { + my $self = shift; + my $page = shift || 1; + confess 'page must be integer.' unless $page =~ /^\d+$/ and $page > 0; + + my $limit = $self->builder->limit->[0] || confess "limit is not set."; + $self->offset( ( $page - 1 ) * $limit ); + + my $pager = Data::Page->new(); + $pager->total_entries( $self->count ); + $pager->entries_per_page( $self->builder->{limit}->[0] || 0 ); + $pager->current_page( $page ); + return $pager; +} + +sub execute { + my $self = shift; + return $self->engine->union( $self->builder, $self->callback, @_ ); +} + +sub as_sql { + my $self = shift; + return $self->builder->as_sql(@_); +} + +1; diff --git a/lib/DBIx/ObjectMapper/SQL/Base.pm b/lib/DBIx/ObjectMapper/SQL/Base.pm index 3449c5b..12304ab 100644 --- a/lib/DBIx/ObjectMapper/SQL/Base.pm +++ b/lib/DBIx/ObjectMapper/SQL/Base.pm @@ -120,10 +120,13 @@ sub convert_columns_to_sql { my $class = shift; return unless @_; - return join ', ', map { + my @converted_columns = map {[ $class->convert_column_alias_to_sql( - $class->convert_func_to_sql($_) ); - } grep { defined $_ } @_; + $class->convert_func_to_sql($_) ) + ]} grep { defined $_ } @_; + + return (join ', ', map {$_->[0]} @converted_columns), + map {@$_[1..$#$_]} @converted_columns; } sub convert_func_to_sql { @@ -131,6 +134,7 @@ sub convert_func_to_sql { return unless $func; return $func unless ref $func; return $$func if ref $func eq 'SCALAR'; + return $func->as_sql('parts') if (blessed($func) && $func->can('as_sql')); return $func unless ref $func eq 'HASH'; my $key = ( keys %$func )[0]; @@ -143,6 +147,8 @@ sub convert_func_to_sql { . ')'; } +sub as_to_sql { $_[0]->{driver} eq 'Oracle' ? 
' ' : ' AS ' } + sub convert_column_alias_to_sql { my ($class, $param) = @_; return $param unless $param; @@ -150,13 +156,14 @@ sub convert_column_alias_to_sql { return $$param if ref $param eq 'SCALAR'; return $param unless ref $param eq 'ARRAY'; - my $col = $class->convert_func_to_sql( $param->[0] ); + my ($col, @binds) = $class->convert_func_to_sql( $param->[0] ); my $alias = $param->[1]; + my $as = $class->as_to_sql; if( $col and $alias ) { - return $col . ' AS ' . $alias; + return ("$col$as$alias", @binds); } else { - return $col; + return ($col, @binds); } } @@ -166,7 +173,7 @@ sub convert_tables_to_sql { my @bind; for my $t ( @_ ) { - my ($table_stm, @table_bind) = $class->convert_table_to_sql(@_); + my ($table_stm, @table_bind) = $class->convert_table_to_sql($t); push @stm, $table_stm; push @bind, @table_bind if @table_bind; } @@ -180,12 +187,15 @@ sub convert_table_to_sql { my ($stm, @bind); if( ref $table eq 'ARRAY' ) { my ($t_stm, @t_bind) = $class->convert_table_to_sql( $table->[0] ); - $stm = $t_stm . ' AS ' . $table->[1]; + $stm = $t_stm . $class->as_to_sql . $table->[1]; push @bind, @t_bind if @t_bind; } elsif( blessed $table and $table->can('as_sql') ) { ( $stm, @bind ) = $table->as_sql('parts'); + if($table->isa('DBIx::ObjectMapper::Query::Union')) { + $stm = '( ' . $stm . ' )'; + } } elsif( ref $table eq 'SCALAR' ) { $stm = $$table; @@ -309,6 +319,10 @@ sub convert_condition_to_sql { $stm .= ' BETWEEN ? AND ?'; push @bind, @{ $w->[2] }; } + elsif( uc($w->[1]) eq 'LIKE' and @{$w->[2]} == 2 ) { + $stm .= ' LIKE ? ESCAPE ?'; + push @bind, @{ $w->[2] }; + } else { confess 'Invalid Parameters in WHERE clause.(' . join( ',', @$w ) . ')'; diff --git a/lib/DBIx/ObjectMapper/SQL/Select.pm b/lib/DBIx/ObjectMapper/SQL/Select.pm index 13b7c0d..88effd2 100644 --- a/lib/DBIx/ObjectMapper/SQL/Select.pm +++ b/lib/DBIx/ObjectMapper/SQL/Select.pm @@ -13,7 +13,7 @@ __PACKAGE__->initdata({ limit => 0, offset => 0, having => [], - driver => undef, # Pg, mysql, SQLite ... + driver => '', # Pg, mysql, SQLite ... }); __PACKAGE__->accessors({ @@ -24,13 +24,15 @@ __PACKAGE__->accessors({ num_check => [qw(limit offset)], }); -sub as_sql { +sub _generate_subquery { my $self = shift; - my $mode = shift; my @bind; my ($from, @from_bind) = $self->from_as_sql; - my $stm = 'SELECT ' . ($self->column_as_sql || '*') . ' FROM ' . $from; + my ($col, @col_bind) = $self->column_as_sql; + + my $stm = 'SELECT ' . ($col || '*') . ' FROM ' . $from; + push @bind, @col_bind if @col_bind; push @bind, @from_bind if @from_bind; my ($join_stm, @join_bind) = $self->join_as_sql; @@ -41,25 +43,74 @@ sub as_sql { $stm .= ' WHERE ' . $where_stm if $where_stm; push @bind, @where_bind if @where_bind; - if( my $group_by = $self->group_by_as_sql ) { - $stm .= ' GROUP BY ' . $group_by; - } + my ($group_by, @group_binds) = $self->group_by_as_sql; + $stm .= ' GROUP BY ' . $group_by if $group_by; + push @bind, @group_binds if @group_binds; my ($having_stm, @having_bind) = $self->having_as_sql; $stm .= ' HAVING ' . $having_stm if $having_stm; push @bind, @having_bind if @having_bind; - if( my $order_by = $self->order_by_as_sql ) { - $stm .= ' ORDER BY ' . $order_by; + my ($order_by, @order_binds) = $self->order_by_as_sql; + $stm .= ' ORDER BY ' . 
$order_by if $order_by; + push @bind, @order_binds if @order_binds; + + return ($stm, @bind); +} + +sub _final_column_name { + my ($class, $raw_column) = @_; + + if (!ref $raw_column || ref $raw_column eq 'HASH') { + return $class->convert_func_to_sql( $raw_column ); + } + + if (defined($raw_column->[1])) { + return $raw_column->[1]; } + my ($column) = $class->convert_func_to_sql( $raw_column->[0] ); + return $column; +} + +sub _apply_oracle_limits { + my ($self, $original_stm, @bind) = @_; + + my $lower_limit = $self->offset ? $self->offset_as_sql : 1; + return ($original_stm, @bind) if !$lower_limit; + my $upper_limit = $self->limit ? $self->limit_as_sql + $lower_limit - 1 : undef; + + my $final_column_list = join(', ', map {$self->_final_column_name($_)} @{$self->column}); + my $stm = "SELECT $final_column_list FROM ( " . + "SELECT /*+ first_row */ rownum AS oracle_rownum_XYZZY, $final_column_list FROM ( $original_stm ) " . + ") WHERE oracle_rownum_XYZZY >= $lower_limit" . + (defined($upper_limit) ? " AND oracle_rownum_XYZZY <= $upper_limit" : ""); + + return ($stm, @bind); +} + +sub _apply_limit_syntax { + my ($self, $stm, @bind) = @_; + + my $method = $self->limit_syntax->{ lc( $self->{driver} ) }; + $method = $self->limit_syntax->{default} + unless $method and $self->can($method); + if( my $add_stm = $self->${method}() ) { + $stm .= $add_stm; + } + + return ($stm, @bind); +} + +sub as_sql { + my $self = shift; + my $mode = shift; + + my ($stm, @bind) = $self->_generate_subquery(); + if( $self->limit || $self->offset ) { - my $method = $self->limit_syntax->{ lc( $self->{driver} ) }; - $method = $self->limit_syntax->{default} - unless $method and $self->can($method); - if( my $add_stm = $self->${method}() ) { - $stm .= $add_stm; - } + ($stm, @bind) = $self->{driver} eq 'Oracle' ? $self->_apply_oracle_limits($stm, @bind) : + $self->_apply_limit_syntax($stm, @bind); } if( $mode and $mode eq 'parts' ) { @@ -69,6 +120,29 @@ sub as_sql { return wantarray ? ($stm, @bind) : $stm; } +sub _add_joins_with_param { + my $self = shift; + my $param = shift; + my @join_params = @_; + my @real_join_expressions = (); + while (@join_params) { + my $table = shift @join_params; + my $conditions = shift @join_params; + push @real_join_expressions, [$table, $conditions, $param]; + } + return $self->add_join(@real_join_expressions); +} + +sub add_inner_join { + my $self = shift; + $self->_add_joins_with_param('INNER', @_); +} + +sub add_left_join { + my $self = shift; + $self->_add_joins_with_param('LEFT', @_); +} + 1; __END__ diff --git a/lib/DBIx/ObjectMapper/SQL/Set.pm b/lib/DBIx/ObjectMapper/SQL/Set.pm index a15c6d6..104b08a 100644 --- a/lib/DBIx/ObjectMapper/SQL/Set.pm +++ b/lib/DBIx/ObjectMapper/SQL/Set.pm @@ -53,17 +53,17 @@ sub as_sql { map { '( ' . $_ . ' )' } @list_stm ); - if( my $group_by = $self->group_by_as_sql ) { - $stm .= ' GROUP BY ' . $group_by; - } + my ($group_by, @group_binds) = $self->group_by_as_sql; + $stm .= ' GROUP BY ' . $group_by if $group_by; + push @bind, @group_binds if @group_binds; my ( $having_stm, @having_bind ) = $self->having_as_sql; $stm .= ' HAVING ' . $having_stm if $having_stm; push @bind, @having_bind if @having_bind; - if( my $order_by = $self->order_by_as_sql ) { - $stm .= ' ORDER BY ' . $order_by; - } + my ($order_by, @order_binds) = $self->order_by_as_sql; + $stm .= ' ORDER BY ' . 
$order_by if $order_by; + push @bind, @order_binds if @order_binds; if( $self->limit || $self->offset ) { my $method = $self->limit_syntax->{ lc( $self->{driver} ) }; diff --git a/lib/DBIx/ObjectMapper/Utils.pm b/lib/DBIx/ObjectMapper/Utils.pm index 35dcddd..8cdb5ff 100644 --- a/lib/DBIx/ObjectMapper/Utils.pm +++ b/lib/DBIx/ObjectMapper/Utils.pm @@ -5,7 +5,6 @@ use Carp::Clan qw/^DBIx::ObjectMapper/; use Try::Tiny; use Class::MOP; use Scalar::Util; -use Hash::Merge; use Class::Inspector; sub installed { Class::Inspector->installed($_[0]) } @@ -79,12 +78,15 @@ sub is_deeply { } } +# This function is intended to merge the values behind the toplevel keys in +# two hashrefs, such that the second/new hashref overrides values in the +# first/old hashref or supplies them if they are missing. sub merge_hashref { my ( $old, $new ) = @_; - croak "Invlid Parameter. usage: merge_hashref(HashRef,HashRef)" + croak "Invalid Parameter. usage: merge_hashref(HashRef,HashRef)" unless ref $old eq 'HASH' and ref $new eq 'HASH'; - Hash::Merge::set_behavior( 'RIGHT_PRECEDENT' ); - return Hash::Merge::merge($old,$new); + + return {%$old, %$new}; } sub camelize { diff --git a/t/01_sql.t b/t/01_sql.t index b987244..40bbf36 100644 --- a/t/01_sql.t +++ b/t/01_sql.t @@ -469,3 +469,73 @@ DBIx::ObjectMapper::SQL->select->from('hoge')->join( ['fuga', undef, 'natural'] --- expected SELECT * FROM hoge NATURAL JOIN fuga <= + +=== left outer join default +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->join( ['fuga', undef] ); + +--- expected +SELECT * FROM hoge LEFT OUTER JOIN fuga <= + +=== left join explicit function +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->add_left_join( 'fuga' ); + +--- expected +SELECT * FROM hoge LEFT JOIN fuga <= + +=== inner join explicit function +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->add_inner_join( 'fuga' ); + +--- expected +SELECT * FROM hoge INNER JOIN fuga <= + +=== inner join with condition +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->join( ['fuga', [\'id=1'], 'INNER'] ); + +--- expected +SELECT * FROM hoge INNER JOIN fuga ON ( id=1 ) <= + +=== inner join explicit function with condition +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->add_inner_join( 'fuga' => [\'id=1'] ); + +--- expected +SELECT * FROM hoge INNER JOIN fuga ON ( id=1 ) <= + +=== additive inner join with condition +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->join( ['moo', undef, 'INNER'] )->add_join( ['fuga', [\'id=1'], 'INNER'] ); + +--- expected +SELECT * FROM hoge INNER JOIN moo INNER JOIN fuga ON ( id=1 ) <= + +=== additive inner join explicit function with condition +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->join( ['moo', undef, 'INNER'] )->add_inner_join( 'fuga' => [\'id=1'] ); + +--- expected +SELECT * FROM hoge INNER JOIN moo INNER JOIN fuga ON ( id=1 ) <= + +=== additive left join explicit function with condition +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->join( ['moo', undef, 'INNER'] )->add_left_join( 'fuga' => [\'id=1'] ); + +--- expected +SELECT * FROM hoge INNER JOIN moo LEFT JOIN fuga ON ( id=1 ) <= + +=== two inner joins explicit function with one condition +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->add_inner_join( 'fuga' => [\'id=1'], 'moo' ); + +--- expected +SELECT * FROM hoge INNER JOIN fuga ON ( id=1 ) INNER JOIN moo <= + +=== two inner joins explicit function with two conditions +--- input +DBIx::ObjectMapper::SQL->select->from('hoge')->add_inner_join( 'fuga' => [\'id=1'], 
'moo' => [\'moo_id=3'] ); + +--- expected +SELECT * FROM hoge INNER JOIN fuga ON ( id=1 ) INNER JOIN moo ON ( moo_id=3 ) <= diff --git a/t/09_query/000_basic.t b/t/09_query/000_basic.t index d4596d1..2069a7f 100644 --- a/t/09_query/000_basic.t +++ b/t/09_query/000_basic.t @@ -54,6 +54,22 @@ is ref($query->insert), 'DBIx::ObjectMapper::Query::Insert'; }; }; +{ # select with clone + my $regular_query1 = $query->select->from('artist'); + my $regular_query2 = $regular_query1->order_by('id'); + my $clone_query1 = $query->select->from('artist'); + my $clone_query2 = $clone_query1->clone->order_by('id'); + + is($regular_query1->as_sql, $regular_query2->as_sql, 'Without clone, they are the same'); + is($regular_query2->as_sql, $clone_query2->as_sql, 'Same final effects'); + isnt($clone_query1->as_sql, $clone_query2->as_sql, 'Truly different queries'); + + my $sql = $clone_query1->as_sql; + $sql =~ s/\W/./g; + + like($clone_query2->as_sql, qr($sql), 'But derivative'); +}; + { # select with callback my $it = $query->select( sub { { id => $_[0]->[0], name => $_[0]->[1] } } diff --git a/t/10_meta/002_table.t b/t/10_meta/002_table.t index c267f8e..cb35cf4 100644 --- a/t/10_meta/002_table.t +++ b/t/10_meta/002_table.t @@ -117,8 +117,8 @@ use DBIx::ObjectMapper::Metadata::Sugar qw(:all); ok my $meta = DBIx::ObjectMapper::Metadata::Table->new( testmetadata => [ Col( name => Text(undef, utf8 => 1) ), - Col( created => Default { time() } ), - Col( updated => OnUpdate { time() }), + Col( created => Time(), Default { time() } ), + Col( updated => Time(), OnUpdate { time() } ), ], { engine => $engine, diff --git a/t/10_meta/009_trigger.t b/t/10_meta/009_trigger.t index 5b1c204..2502c2a 100644 --- a/t/10_meta/009_trigger.t +++ b/t/10_meta/009_trigger.t @@ -92,10 +92,23 @@ $artist->delete->execute; is $trigger_cnt{before_delete}, 1; is $trigger_cnt{after_delete}, 1; -eval "require Test::Memory::Cycle"; -unless( $@ ) { +SKIP: { + eval "require Test::Memory::Cycle"; + if ($@) { + skip("Error requiring Test::Memory::Cycle: $@", 2); + } + + # Versions of Devel::Cycle less than 1.09 had a bug when looking at + # closed-over variables in coderefs: Devel::Cycle attempted to dereference + # all such variables as scalar references. This dies when, for example, + # the variable is a hashref. Skip these tests if Devel::Cycle is an + # earlier version. + if (Devel::Cycle->VERSION lt '1.09') { + skip("Skipped memory cycle test because your version (" . Devel::Cycle->VERSION. 
") of Devel::Cycle is ancient and buggy", 2); + } + Test::Memory::Cycle::memory_cycle_ok( $artist ); Test::Memory::Cycle::memory_cycle_ok( $meta ); -} +}; done_testing; diff --git a/t/12_session/030_inherit_polymorphic_tree.t b/t/12_session/030_inherit_polymorphic_tree.t index f89e3f6..8776f94 100644 --- a/t/12_session/030_inherit_polymorphic_tree.t +++ b/t/12_session/030_inherit_polymorphic_tree.t @@ -14,8 +14,8 @@ my $mapper = DBIx::ObjectMapper->new( q{CREATE TABLE employee(id integer primary key, type text, name text, memo text)}, q{CREATE TABLE engineer(id integer primary key, language text, FOREIGN KEY(id) REFERENCES person(id))}, q{CREATE TABLE manager(id integer primary key, type text, golf_swing text, FOREIGN KEY(id) REFERENCES employee(id))}, - q{CREATE TABLE geek_manager (id integer primary key, language TEXT, memo text, FOREIGN KEY (id) REFERENCES employee(id))}, - q{CREATE TABLE deadshit_manager (id integer primary key, iq integer, memo text, FOREIGN KEY (id) REFERENCES employee(id))}, + q{CREATE TABLE geek_manager (id integer primary key, type text, language TEXT, memo text, FOREIGN KEY (id) REFERENCES employee(id))}, + q{CREATE TABLE deadshit_manager (id integer primary key, type text, iq integer, memo text, FOREIGN KEY (id) REFERENCES employee(id))}, ], }), ); @@ -184,7 +184,8 @@ $mapper->maps( properties => { old_language => { isa => $mapper->metadata->t('geek_manager')->c('language'), - } + }, + t => { isa => $mapper->metadata->t('geek_manager')->c('type') }, } } ); @@ -194,6 +195,11 @@ $mapper->maps( inherits => 'My::Manager', polymorphic_on => 'type', polymorphic_identity => 'deadshit_manager', + attributes => { + properties => { + t => { isa => $mapper->metadata->t('deadshit_manager')->c('type') }, + } + } ); my @languages = ( diff --git a/t/12_session/032_class_table_inheritance_with_relation.t b/t/12_session/032_class_table_inheritance_with_relation.t index 6aca0c5..70822ac 100644 --- a/t/12_session/032_class_table_inheritance_with_relation.t +++ b/t/12_session/032_class_table_inheritance_with_relation.t @@ -13,7 +13,7 @@ my $mapper = DBIx::ObjectMapper->new( on_connect_do => [ q{CREATE TABLE language (id integer primary key, name text)}, q{CREATE TABLE employee(id integer primary key, type text)}, - q{CREATE TABLE engineer(id integer primary key, language_id integer REFERENCES language(id), FOREIGN KEY(id) REFERENCES person(id))}, + q{CREATE TABLE engineer(id integer primary key, type text, language_id integer REFERENCES language(id), FOREIGN KEY(id) REFERENCES person(id))}, q{CREATE TABLE project (id integer primary key, engineer_id integer REFERENCES engineer(id) )}, ], }), @@ -39,6 +39,12 @@ my $project = $mapper->metadata->t( 'project' => 'autoload' ); return $self->{id}; } + sub type { + my $self = shift; + $self->{type} = shift if @_; + return $self->{type}; + } + 1; }; diff --git a/t/14_oracle/paging.t b/t/14_oracle/paging.t new file mode 100644 index 0000000..11d0410 --- /dev/null +++ b/t/14_oracle/paging.t @@ -0,0 +1,60 @@ +use strict; +use warnings FATAL => 'all'; +use Test::More; +use DBIx::ObjectMapper::SQL; + +#-- Regular query +my $query = DBIx::ObjectMapper::SQL->select + ->column(['g', 'h'], 'i') + ->from(['foo', 'f']) + ->where(['x', '=', 'something']) + ->limit(10) + ->offset(4); + +is_deeply( + [$query->as_sql], + ["SELECT g AS h, i FROM foo AS f WHERE ( x = ? 
) LIMIT 4, 10", 'something'], + 'Default select statement format' +); + + +#-- Now we give it the Oracle flavor +$query->{driver} = 'Oracle'; + +is_deeply( + [$query->as_sql], + ["SELECT h, i FROM ( " . + "SELECT /*+ first_row */ rownum AS oracle_rownum_XYZZY, h, i FROM ( " . + "SELECT g h, i FROM foo f WHERE ( x = ? ) " . + ") " . + ") WHERE oracle_rownum_XYZZY >= 4 AND oracle_rownum_XYZZY <= 13", + 'something'], + 'Oracle select statement format' +); + + +#-- No limits regular +$query = DBIx::ObjectMapper::SQL->select + ->column(['g', 'h'], ['i']) + ->from(['foo', 'f']) + ->where(['x', '=', 'something']); + +is_deeply( + [$query->as_sql], + ["SELECT g AS h, i FROM foo AS f WHERE ( x = ? )", 'something'], + 'Default no limits select statement format' +); + + +#-- No limits Oracle +$query->{driver} = 'Oracle'; + +is_deeply( + [$query->as_sql], + ["SELECT g h, i FROM foo f WHERE ( x = ? )", 'something'], + 'Oracle no limits select statement format' +); + + +done_testing; + diff --git a/t/20_misc/100_oracle.t b/t/20_misc/100_oracle.t new file mode 100644 index 0000000..083f956 --- /dev/null +++ b/t/20_misc/100_oracle.t @@ -0,0 +1,181 @@ +use strict; +use warnings; +use Test::More; +use DateTime; +use DateTime::Duration; +use DateTime::Format::Oracle; +use DBIx::ObjectMapper::Engine::DBI; +use DBIx::ObjectMapper; + +my ($dsn, $user, $pass, $schema) = @ENV{map { "MAPPER_TEST_ORACLE_${_}" } qw/DSN USER PASS SCHEMA/}; +plan skip_all => 'Set $ENV{MAPPER_TEST_ORACLE_DSN}, _USER, _PASS, and _SCHEMA to run this test.' unless ($dsn && $user); + +my $engine = DBIx::ObjectMapper::Engine::DBI->new({ + dsn => $dsn, + username => $user, + password => $pass, + db_schema => $schema, + on_connect_do => [ + + # Ensure date format for DATE fields is consistent before we start inserting rows + "alter session set nls_date_format = '" . + DateTime::Format::Oracle->nls_date_format . + "'", + + # ...and the timestamp format for TIMESTAMP fields + "alter session set nls_timestamp_format = '" . + DateTime::Format::Oracle->nls_date_format . 
+ "'", + + # Create test table + q{ +CREATE TABLE test_types ( + id INTEGER PRIMARY KEY, + i INT, + num NUMERIC(10,2), + f NUMBER(5,4), + deci DECIMAL(10,2), + photo BLOB, + lblob LONG, + created TIMESTAMP(0) DEFAULT SYSDATE, + modified TIMESTAMP, + dt DATE +) + +} + ], + on_disconnect_do => q{DROP TABLE test_types}, + time_zone => 'Asia/Tokyo', +}); + +my $mapper = DBIx::ObjectMapper->new( engine => $engine ); +$mapper->metadata->autoload_all_tables; +my $table = $mapper->metadata->t( 'TEST_TYPES' ); +my $GIF = 'R0lGODlhAQABAJEAAAAAAP///////wAAACH5BAUUAAIALAAAAAABAAEAAAICVAEAOw== +'; +my $now = DateTime->now( time_zone => 'UTC' ); + + +is $table->c('ID')->type->type, 'numeric'; +is $table->c('I')->type->type, 'numeric'; +is $table->c('NUM')->type->type, 'numeric'; +is $table->c('F')->type->type, 'numeric'; +is $table->c('DECI')->type->type, 'numeric'; +is $table->c('PHOTO')->type->type, 'blob'; +is $table->c('LBLOB')->type->type, 'binary'; +is $table->c('CREATED')->type->type, 'datetime'; +is $table->c('MODIFIED')->type->type, 'datetime'; +is $table->c('DT')->type->type, 'date'; + + +{ + my $r = $table->insert( + ID => 1, + I => 2, + NUM => 0.202927272, + F => 0.202927272, + DECI => 20, + PHOTO => $GIF, + LBLOB => $GIF, + DT => $now, + )->execute; + + # check last_insert_id + is $r->{ID}, 1; +}; + + +{ # find + ok my $d = $table->find(1); + + is $d->{PHOTO}, $GIF; + is $d->{LBLOB}, $GIF; + + is ref($d->{CREATED}), 'DateTime'; + is $d->{CREATED}->time_zone->name, 'Asia/Tokyo'; + ok !$d->{CREATED}->time_zone->is_utc; + is $d->{CREATED}->time_zone->offset_for_datetime($now), 9*60*60; + + ok !$d->{MODIFIEd}; + is $d->{DT}->ymd('-'), $now->ymd('-'); + + ok $d->{I} == 2; + + ok $d->{NUM} == 0.20; + ok $d->{F} == 0.2029; + ok $d->{DECI} == 20.00; + +}; + +{ # search by date + ok my $d = $table->select->where( + $table->c('DT') == $now, + )->first; + is $d->{ID}, 1; +}; + +{ # update + ok $table->update->set( MODIFIED => $now )->where( $table->c('ID') == 1 )->execute; + my $d = $table->find(1); + is $d->{MODIFIED}, $now; +}; + +{ # limit, offset + ok my $r = $table->select->limit(1)->offset(0)->execute; + is $r->first->{ID}, 1; +}; + +$mapper->maps( + $table => 'My::MyOracleTest', + accessors => { auto => 1 }, + constructor => { auto => 1 }, +); + +{ # transaction rollback + my $session = $mapper->begin_session( autocommit => 0 ); + $session->add( My::MyOracleTest->new( ID => 4, I => 10 ) ); + $session->rollback; +}; + +{ # check + my $session = $mapper->begin_session( autocommit => 0 ); + my $attr = $mapper->attribute('My::MyOracleTest'); + is $session->search('My::MyOracleTest')->filter( $attr->p('I') == 10 )->count, 0; +}; + +{ # transaction commit + my $session = $mapper->begin_session( autocommit => 0 ); + $session->add( My::MyOracleTest->new( ID => 5, I => 10 ) ); + $session->commit; +}; + +{ # check + my $session = $mapper->begin_session( autocommit => 0 ); + my $attr = $mapper->attribute('My::MyOracleTest'); + is $session->search('My::MyOracleTest')->filter( $attr->p('I') == 10 )->count, 1; +}; + + +{ # savepoint + my $session = $mapper->begin_session( autocommit => 0 ); + $session->add( My::MyOracleTest->new( ID => 6, I => 11 ) ); + eval { + $session->txn( + sub { + $session->add( My::MyOracleTest->new( ID => 1, I => 11 ) ); + $session->commit; + } + ); + }; + ok $@; + $session->add( My::MyOracleTest->new( ID => 7, I => 11 ) ); + $session->commit; +}; + +{ + my $session = $mapper->begin_session( autocommit => 0 ); + my $attr = $mapper->attribute('My::MyOracleTest'); + is 
$session->search('My::MyOracleTest')->filter( $attr->p('I') == 11 )->count, 2; +}; + +done_testing;
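
The following is a minimal usage sketch, not part of the patch or its tests: it illustrates the new external_dbh option added to lib/DBIx/ObjectMapper/Engine/DBI.pm above, which makes the engine adopt an already-open DBI handle instead of calling DBI->connect itself, and leaves that handle open when disconnect() is called. Omitting dsn/username/password alongside external_dbh is an assumption here, not behaviour documented by the patch.

    use strict;
    use warnings;
    use DBI;
    use DBIx::ObjectMapper;
    use DBIx::ObjectMapper::Engine::DBI;

    # A handle opened elsewhere, e.g. shared with the rest of the application.
    my $dbh = DBI->connect( 'dbi:SQLite:dbname=:memory:', '', '',
                            { RaiseError => 1, AutoCommit => 1 } );

    my $mapper = DBIx::ObjectMapper->new(
        engine => DBIx::ObjectMapper::Engine::DBI->new({
            # Adopted instead of connecting; the external_dbh checks added to
            # _connect and disconnect above keep this handle open when the
            # engine is torn down, so the caller remains responsible for it.
            external_dbh => $dbh,
        }),
    );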