2 package Bio
::DB
::SeqFeature
::Store
::DBI
::Pg
;
3 use DBD
::Pg
qw(:pg_types);
5 # $Id: Pg.pm 14656 2008-04-14 15:05:37Z lstein $
9 Bio::DB::SeqFeature::Store::DBI::Pg -- PostgreSQL implementation of Bio::DB::SeqFeature::Store
13 use Bio::DB::SeqFeature::Store;
15 # Open the sequence database
16 my $db = Bio::DB::SeqFeature::Store->new(-adaptor => 'DBI::Pg',
17 -dsn => 'dbi:Pg:test');
19 # get a feature from somewhere
20 my $feature = Bio::SeqFeature::Generic->new(...);
23 $db->store($feature) or die "Couldn't store!";
25 # primary ID of the feature is changed to indicate its primary ID
27 my $id = $feature->primary_id;
29 # get the feature back out
30 my $f = $db->fetch($id);
32 # change the feature and update it
34 $db->update($f) or die "Couldn't update!";
38 my @features = $db->fetch_many(@list_of_ids);
41 @features = $db->get_features_by_name('ZK909');
44 @features = $db->get_features_by_alias('sma-3');
47 @features = $db->get_features_by_type('gene');
50 @features = $db->get_features_by_location(-seq_id=>'Chr1',-start=>4000,-end=>600000);
53 @features = $db->get_features_by_attribute({description => 'protein kinase'})
55 # ...by the GFF "Note" field
56 @result_list = $db->search_notes('kinase');
58 # ...by arbitrary combinations of selectors
59 @features = $db->features(-name => $name,
64 -attributes => $attributes);
66 # ...using an iterator
67 my $iterator = $db->get_seq_stream(-name => $name,
72 -attributes => $attributes);
74 while (my $feature = $iterator->next_seq) {
75 # do something with the feature
78 # ...limiting the search to a particular region
79 my $segment = $db->segment('Chr1',5000=>6000);
80 my @features = $segment->features(-type=>['mRNA','match']);
82 # getting & storing sequence information
83 # Warning: this returns a string, and not a PrimarySeq object
84 $db->insert_sequence('Chr1','GATCCCCCGGGATTCCAAAA...');
85 my $sequence = $db->fetch_sequence('Chr1',5000=>6000);
87 # what feature types are defined in the database?
88 my @types = $db->types;
90 # create a new feature in the database
91 my $feature = $db->new_feature(-primary_tag => 'mRNA',
98 Bio::DB::SeqFeature::Store::DBI::Pg is the Pg adaptor for
99 Bio::DB::SeqFeature::Store. You will not create it directly, but
100 instead use Bio::DB::SeqFeature::Store-E<gt>new() to do so.
102 See L<Bio::DB::SeqFeature::Store> for complete usage instructions.
104 =head2 Using the Pg adaptor
106 Before you can use the adaptor, you must use the pgAdmin tool to
107 create a database and establish a user account with write
108 permission. In order to use "fast" loading, the user account must have
111 To establish a connection to the database, call
112 Bio::DB::SeqFeature::Store-E<gt>new(-adaptor=E<gt>'DBI::Pg',@more_args). The
113 additional arguments are as follows:
115 Argument name Description
116 ------------- -----------
118 -dsn The database name. You can abbreviate
119 "dbi:Pg:foo" as "foo" if you wish.
121 -user Username for authentication.
123 -pass Password for authentication.
125 -namespace Creates a SCHEMA for the tables. This allows you
126 to have several virtual databases in the same
129 -temp Boolean flag. If true, a temporary database
130 will be created and destroyed as soon as
131 the Store object goes out of scope. (synonym -temporary)
133 -autoindex Boolean flag. If true, features in the database will be
134 reindexed every time they change. This is the default.
137 -tmpdir Directory in which to place temporary files during "fast" loading.
138 Defaults to File::Spec->tmpdir(). (synonyms -dump_dir, -dumpdir, -tmp)
140 -dbi_options A hashref to pass to DBI->connect's 4th argument, the "attributes."
141 (synonyms -options, -dbi_attr)
143 -write Pass true to open database for writing or updating.
145 If successful, a new instance of
146 Bio::DB::SeqFeature::Store::DBI::Pg will be returned.
148 In addition to the standard methods supported by all well-behaved
149 Bio::DB::SeqFeature::Store databases, the following
150 adaptor-specific methods are provided. These are described in the next
157 use base
'Bio::DB::SeqFeature::Store::DBI::mysql';
158 use Bio
::DB
::SeqFeature
::Store
::DBI
::Iterator
;
162 use Bio
::DB
::GFF
::Util
::Rearrange
'rearrange';
165 use constant DEBUG
=>0;
167 use constant MAX_INT
=> 2_147_483_647
;
168 use constant MIN_INT
=> -2_147_483_648
;
169 use constant MAX_BIN
=> 1_000_000_000
; # size of largest feature = 1 Gb
170 use constant MIN_BIN
=> 1000; # smallest bin we'll make - on a 100 Mb chromosome, there'll be 100,000 of these
173 # object initialization
175 # NOTE: most of this code can be refactored and inherited from DBI or DBI::mysql adapter
190 ) = rearrange
(['DSN',
191 ['TEMP','TEMPORARY'],
194 ['DUMP_DIR','DUMPDIR','TMP','TMPDIR'],
196 ['PASS','PASSWD','PASSWORD'],
197 ['OPTIONS','DBI_OPTIONS','DBI_ATTR'],
198 ['WRITE','WRITEABLE'],
204 $dbi_options ||= {pg_server_prepare
=> 0};
205 $writeable = 1 if $is_temporary or $dump_dir;
207 $dsn or $self->throw("Usage: ".__PACKAGE__
."->init(-dsn => \$dbh || \$dsn)");
213 $dsn = "dbi:Pg:$dsn" unless $dsn =~ /^dbi:/;
214 $dbh = DBI
->connect($dsn,$user,$pass,$dbi_options) or $self->throw($DBI::errstr
);
216 $dbh->do('set client_min_messages=warning') if $dbh;
217 $self->{'original_arguments'} = {
221 'dbh_options' => $dbi_options,
224 $self->{dbh
}->{InactiveDestroy
} = 1;
225 $self->{is_temp
} = $is_temporary;
226 $self->{writeable
} = $writeable;
227 $self->{namespace
} = $namespace || $schema || 'public';
228 $self->schema($self->{namespace
});
230 $self->default_settings;
231 $self->autoindex($autoindex) if defined $autoindex;
232 $self->dumpdir($dump_dir) if $dump_dir;
233 if ($self->is_temp) {
234 # warn "creating a temp database isn't supported";
235 #$self->init_tmp_database();
236 $self->init_database('erase');
238 $self->init_database('erase');
# Returns a hashref mapping table name => PostgreSQL DDL (heredoc text) used
# by init_database() to create the schema: feature, locationlist, typelist,
# name, attribute, attributelist, parent2child, sequence, interval_stats.
# Overrides the mysql adapter's definitions with Pg types (serial, bytea,
# quoted "offset") and Pg-specific indexes (lower(name), varchar_pattern_ops).
# NOTE(review): this fragment is missing interior lines (several heredoc
# openers and their END terminators); comments are added above the sub only
# so no SQL text is disturbed -- consult the complete file before editing DDL.
242 sub table_definitions
{
247 id serial primary key,
252 strand int default 0,
255 indexed int default 1,
256 object bytea not null
258 CREATE INDEX feature_stuff ON feature(seqid,tier,bin,typeid);
259 CREATE INDEX feature_typeid ON feature(typeid);
262 locationlist
=> <<END,
264 id serial primary key,
265 seqname text not null
266 ); CREATE INDEX locationlist_seqname ON locationlist(seqname);
271 id serial primary key,
273 ); CREATE INDEX typelist_tab ON typelist(tag);
279 display_name int default 0
281 CREATE INDEX name_id ON name( id );
282 CREATE INDEX name_name ON name( name );
283 CREATE INDEX name_lcname ON name( lower(name) );
284 CREATE INDEX name_lcname_varchar_patt_ops ON name USING BTREE (lower(name) varchar_pattern_ops);
290 attribute_id int not null,
293 CREATE INDEX attribute_id ON attribute(id);
294 CREATE INDEX attribute_id_val ON attribute(attribute_id,SUBSTR(attribute_value, 1, 10));
297 attributelist
=> <<END,
299 id serial primary key,
302 CREATE INDEX attributelist_tag ON attributelist(tag);
304 parent2child
=> <<END,
309 CREATE UNIQUE INDEX parent2child_id_child ON parent2child(id,child);
314 name text primary key,
321 "offset" int not null,
323 primary key(id,"offset")
327 interval_stats
=> <<END,
332 cum_count int not null
334 CREATE UNIQUE INDEX interval_stats_id ON interval_stats(typeid,seqid,bin);
340 my ($self, $schema) = @_;
341 $self->{'schema'} = $schema if defined($schema);
343 $self->_check_for_namespace();
344 $self->dbh->do("SET search_path TO " . $self->{'schema'} );
346 $self->dbh->do("SET search_path TO public");
348 return $self->{'schema'};
351 # wipe database clean and reinstall schema
357 my $dbh = $self->dbh;
358 my $namespace = $self->namespace;
359 my $tables = $self->table_definitions;
360 my $temporary = $self->is_temp ?
'TEMPORARY' : '';
361 foreach (keys %$tables) {
362 next if $_ eq 'meta'; # don't get rid of meta data!
363 my $table = $self->_qualify($_);
364 $dbh->do("DROP TABLE IF EXISTS $table") if $erase;
365 my @table_exists = $dbh->selectrow_array("SELECT * FROM pg_tables WHERE tablename = '$table' AND schemaname = '$self->namespace'");
366 if (!scalar(@table_exists)) {
367 my $query = "CREATE $temporary TABLE $table $tables->{$_}";
368 $dbh->do($query) or $self->throw($dbh->errstr);
371 $self->subfeatures_are_indexed(1) if $erase;
# Create the "meta" settings table in the current namespace if it does not
# already exist; a no-op unless the handle was opened writeable.  The table is
# created TEMPORARY when the store is a temp database.
# NOTE(review): fragment is missing interior lines (e.g. the "my $self = shift"
# line and closing brace); code left byte-identical, comments only.
375 sub maybe_create_meta
{
377 return unless $self->writeable;
378 my $namespace = $self->namespace;
379 my $table = $self->_qualify('meta');
380 my $tables = $self->table_definitions;
# TEMPORARY keyword only for temp stores so the table vanishes on disconnect.
381 my $temporary = $self->is_temp ?
'TEMPORARY' : '';
# Probe pg_tables rather than issuing CREATE ... IF NOT EXISTS (older Pg).
382 my @table_exists = $self->dbh->selectrow_array("SELECT * FROM pg_tables WHERE tablename = 'meta' AND schemaname = '$namespace'");
383 $self->dbh->do("CREATE $temporary TABLE $table $tables->{meta}")
384 unless @table_exists;
388 # check if the namespace/schema exists, if not create it
# Ensure the configured namespace exists as a Pg SCHEMA, creating it when
# absent.  Does nothing for the default 'public' schema.  When the store is
# temporary AND this process created the schema, sets $self->{delete_schema}
# so remove_namespace() will drop it again on teardown.
391 sub _check_for_namespace
{
393 my $namespace = $self->namespace;
394 return if $namespace eq 'public';
395 my $dbh = $self->dbh;
# Probe the catalog; CREATE SCHEMA IF NOT EXISTS was not available on old Pg.
396 my @schema_exists = $dbh->selectrow_array("SELECT * FROM pg_namespace WHERE nspname = '$namespace'");
397 if (!scalar(@schema_exists)) {
398 my $query = "CREATE SCHEMA $namespace";
399 $dbh->do($query) or $self->throw($dbh->errstr);
401 # if temp parameter is set and schema created for this process then enable removal in remove_namespace()
402 if ($self->is_temp) {
403 $self->{delete_schema
} = 1;
409 # Overriding inherited mysql _qualify (We do not need to qualify for PostgreSQL as we have set the search_path above)
413 my $table_name = shift;
418 # when is_temp is set and the schema did not exist beforehand then we are able to remove it
# Drop the schema again, but only if _check_for_namespace() flagged it as
# created by this (temporary) store -- never drops a pre-existing schema.
420 sub remove_namespace
{
422 if ($self->{delete_schema
}) {
423 my $namespace = $self->namespace;
424 $self->dbh->do("DROP SCHEMA $namespace") or $self->throw($self->dbh->errstr);
428 #### Overriding the inherited mysql function _prepare
433 my $dbh = $self->dbh;
434 my $schema = $self->{namespace
};
437 $self->_check_for_namespace();
438 $dbh->do("SET search_path TO " . $self->{'schema'} );
440 $dbh->do("SET search_path TO public");
442 my $sth = $dbh->prepare_cached($query, {}, 3) or
443 $self->throw($dbh->errstr);
# Finish a "fast" bulk load: for the feature table and every index table,
# stream the dump file produced during loading into Pg via COPY ... FROM STDIN
# (pg_putline/pg_endcopy), then clear the bulk-update bookkeeping flags.
# NOTE(review): fragment is missing interior lines (loop close, file cleanup);
# code left byte-identical, comments only.
447 sub _finish_bulk_update
{
449 my $dbh = $self->dbh;
450 my $dir = $self->{dumpdir
} || '.';
451 for my $table ('feature',$self->index_tables) {
452 my $fh = $self->dump_filehandle($table);
453 my $path = $self->dump_path($table);
455 my $qualified_table = $self->_qualify($table);
# Keep a backup copy of the dump file in case the COPY fails part-way.
456 copy
($path, "$path.bak");
457 # Get stuff from file into STDIN so we don't have to be superuser
458 open my $FH, '<', $path or $self->throw("Could not read file '$path': $!");
459 print STDERR
"Loading file $path\n";
460 $dbh->do("COPY $qualified_table FROM STDIN CSV QUOTE '''' DELIMITER '\t'") or $self->throw($dbh->errstr);
461 while (my $line = <$FH>) {
462 $dbh->pg_putline($line);
464 $dbh->pg_endcopy() or $self->throw($dbh->errstr);
468 delete $self->{bulk_update_in_progress
};
469 delete $self->{filehandles
};
474 # Add a subparts to a feature. Both feature and all subparts must already be in database.
# Attach subfeatures to a parent feature via the parent2child table.  Both
# parent and children must already be stored.  Emulates mysql's REPLACE by
# issuing a DELETE followed by an INSERT per child, inside a transaction.
# Accepts either feature objects or raw primary IDs for parent and children.
# NOTE(review): fragment is missing interior lines (argument unpacking,
# commit/rollback tail); code left byte-identical, comments only.
476 sub _add_SeqFeature
{
479 # special purpose method for case when we are doing a bulk update
480 return $self->_dump_add_SeqFeature(@_) if $self->{bulk_update_in_progress
};
485 my $dbh = $self->dbh;
# Promote DBI errors to exceptions for the duration of this transaction.
486 local $dbh->{RaiseError
} = 1;
488 my $child_table = $self->_parent2child_table();
# DELETE + INSERT pair stands in for mysql's non-standard REPLACE.
491 my $querydel = "DELETE FROM $child_table WHERE id = ? AND child = ?";
492 my $query = "INSERT INTO $child_table (id,child) VALUES (?,?)";
493 my $sthdel = $self->_prepare($querydel);
494 my $sth = $self->_prepare($query);
# Parent may be an object (use its primary_id) or already a raw ID.
496 my $parent_id = (ref $parent ?
$parent->primary_id : $parent)
497 or $self->throw("$parent should have a primary_id");
499 $self->begin_work or $self->throw($dbh->errstr);
501 for my $child (@children) {
502 my $child_id = ref $child ?
$child->primary_id : $child;
503 defined $child_id or die "no primary ID known for $child";
504 $sthdel->execute($parent_id, $child_id);
505 $sth->execute($parent_id,$child_id);
511 warn "Transaction aborted because $@";
521 # because this is a reserved word in postgresql
523 # get primary sequence between start and end
# Fetch raw sequence (a string, not a PrimarySeq) for $seqid between $start
# and $end, assembling it from fixed-offset fragments in the sequence table.
# Overridden from the mysql adapter because "offset" is a reserved word in
# PostgreSQL and must be double-quoted in SQL.  Passing $start > $end swaps
# the bounds; the tr/// at the end looks like the complement step of a
# reverse-complement for that case -- confirm against the complete file.
# Coordinates are converted from 1-based to 0-based before offset lookup.
# NOTE(review): fragment is missing interior lines (heredoc END, reverse()
# and return); comments are not inserted after the heredoc opener so no SQL
# text can be disturbed.
525 sub _fetch_sequence
{
527 my ($seqid,$start,$end) = @_;
529 # backward compatibility to the old days when I liked reverse complementing
530 # dna by specifying $start > $end
532 if (defined $start && defined $end && $start > $end) {
534 ($start,$end) = ($end,$start);
# Convert 1-based coordinates to the 0-based offsets stored in the table.
536 $start-- if defined $start;
537 $end-- if defined $end;
# Find the fragment offsets bracketing the requested range.
539 my $offset1 = $self->_offset_boundary($seqid,$start || 'left');
540 my $offset2 = $self->_offset_boundary($seqid,$end || 'right');
541 my $sequence_table = $self->_sequence_table;
542 my $locationlist_table = $self->_locationlist_table;
544 my $sth = $self->_prepare(<<END);
545 SELECT sequence,"offset"
546 FROM $sequence_table as s,$locationlist_table as ll
555 $sth->execute($seqid,$offset1,$offset2) or $self->throw($sth->errstr);
557 while (my($frag,$offset) = $sth->fetchrow_array) {
558 substr($frag,0,$start-$offset) = '' if defined $start && $start > $offset;
561 substr($seq,$end-$start+1) = '' if defined $end && $end-$start+1 < length($seq);
564 $seq =~ tr/gatcGATC/ctagCTAG/;
# Return the fragment "offset" boundary for $seqid.  $position is 'left'
# (smallest offset), 'right' (largest offset), or a numeric coordinate
# (largest offset <= that coordinate).  "offset" is quoted throughout because
# it is a reserved word in PostgreSQL.
570 sub _offset_boundary
{
572 my ($seqid,$position) = @_;
574 my $sequence_table = $self->_sequence_table;
575 my $locationlist_table = $self->_locationlist_table;
# Three-way ternary selects the query variant; only the numeric form binds
# a second placeholder for the coordinate.
578 $sql = $position eq 'left' ?
"SELECT min(\"offset\") FROM $sequence_table as s,$locationlist_table as ll WHERE s.id=ll.id AND ll.seqname=?"
579 :$position eq 'right' ?
"SELECT max(\"offset\") FROM $sequence_table as s,$locationlist_table as ll WHERE s.id=ll.id AND ll.seqname=?"
580 :"SELECT max(\"offset\") FROM $sequence_table as s,$locationlist_table as ll WHERE s.id=ll.id AND ll.seqname=? AND \"offset\"<=?";
582 my $sth = $self->_prepare($sql);
# Bind the coordinate only when $position is an integer, matching the SQL.
583 my @args = $position =~ /^-?\d+$/ ?
($seqid,$position) : ($seqid);
584 $sth->execute(@args) or $self->throw($sth->errstr);
585 my $boundary = $sth->fetchall_arrayref->[0][0];
592 my ($name,$allow_aliases,$join) = @_;
593 my $name_table = $self->_name_table;
595 my $from = "$name_table as n";
596 my ($match,$string) = $self->_match_sql($name);
598 my $where = "n.id=$join AND lower(n.name) $match";
599 $where .= " AND n.display_name>0" unless $allow_aliases;
600 return ($from,$where,'',$string);
# Full-text-ish search of attribute values.  Splits $search_string into
# quotemeta'd words, matches them against attribute_value with SQL wildcards
# (SIMILAR TO / %word%), then re-scores each row in Perl with a case-
# insensitive regexp hit count.  Returns rows of [name, value, score, type,
# id] sorted by descending score, optionally LIMITed.
# Overridden from the mysql adapter which uses a non-standard query hint.
# NOTE(review): fragment is missing interior lines (the heredoc opener for
# $sql and its END); no comments are inserted inside that region.
603 sub _search_attributes
{
605 my ($search_string,$attribute_names,$limit) = @_;
# quotemeta each word so user input cannot inject regexp metacharacters.
606 my @words = map {quotemeta($_)} split /\s+/,$search_string;
607 my $name_table = $self->_name_table;
608 my $attribute_table = $self->_attribute_table;
609 my $attributelist_table = $self->_attributelist_table;
610 my $type_table = $self->_type_table;
611 my $typelist_table = $self->_typelist_table;
# One "al.tag=?" clause per requested attribute name.
613 my @tags = @
$attribute_names;
614 my $tag_sql = join ' OR ',("al.tag=?") x
@tags;
616 my $perl_regexp = join '|',@words;
618 my @wild_card_words = map { "%$_%" } @words;
619 my $sql_regexp = join ' OR ',("a.attribute_value SIMILAR TO ?") x
@words;
621 SELECT name,attribute_value,tl.tag,n.id
622 FROM $name_table as n,$attribute_table as a,$attributelist_table as al,$type_table as t,$typelist_table as tl
624 AND al.id=a.attribute_id
631 $sql .= "LIMIT $limit" if defined $limit;
632 $self->_print_query($sql,@tags,@wild_card_words) if DEBUG
|| $self->debug;
633 my $sth = $self->_prepare($sql);
634 $sth->execute(@tags,@wild_card_words) or $self->throw($sth->errstr);
# Re-score in Perl: hits * 100 normalised by query words and row words.
637 while (my($name,$value,$type,$id) = $sth->fetchrow_array) {
638 my (@hits) = $value =~ /$perl_regexp/ig;
639 my @words_in_row = split /\b/,$value;
640 my $score = int(@hits*100/@words/@words_in_row);
641 push @results,[$name,$value,$score,$type,$id];
644 @results = sort {$b->[2]<=>$a->[2]} @results;
648 # overridden here because the mysql adapter uses
649 # a non-standard query hint
# Build the (FROM, WHERE, GROUP BY, bind-args) SQL fragments for attribute-
# based feature queries, joining attribute and attributelist.  Overridden
# here because the mysql adapter embeds a non-standard query hint.
# NOTE(review): fragment is missing the heredoc opener/terminator for the
# WHERE clause; no comments inserted inside that region.
650 sub _attributes_sql
{
652 my ($attributes,$join) = @_;
654 my ($wf,@bind_args) = $self->_make_attribute_where('a','al',$attributes);
655 my ($group_by,@group_args)= $self->_make_attribute_group('a',$attributes);
657 my $attribute_table = $self->_attribute_table;
658 my $attributelist_table = $self->_attributelist_table;
660 my $from = "$attribute_table as a, $attributelist_table as al";
664 AND a.attribute_id=al.id
668 my $group = $group_by;
670 my @args = (@bind_args,@group_args);
671 return ($from,$where,$group,@args);
679 if ($name =~ /(?:^|[^\\])[*?]/) {
680 $name =~ s/(^|[^\\])([%_])/$1\\$2/g;
681 $name =~ s/(^|[^\\])\*/$1%/g;
682 $name =~ s/(^|[^\\])\?/$1_/g;
686 $match = "= lower(?)";
689 return ($match,$string);
692 # overridden because of differences between LIKE behavior in mysql and postgres
693 # as well as case-sensitivity of matches
696 my ($types,$type_table) = @_;
697 my ($primary_tag,$source_tag);
699 my @types = ref $types eq 'ARRAY' ? @
$types : $types;
701 my $typelist = $self->_typelist_table;
702 my $from = "$typelist AS tl";
706 for my $type (@types) {
708 if (ref $type && $type->isa('Bio::DB::GFF::Typename')) {
709 $primary_tag = $type->method;
710 $source_tag = $type->source;
712 ($primary_tag,$source_tag) = split ':',$type,2;
716 push @matches,"lower(tl.tag)=lower(?)";
717 push @args,"$primary_tag:$source_tag";
719 push @matches,"tl.tag ILIKE ?";
720 push @args,"$primary_tag:%";
723 my $matches = join ' OR ',@matches;
726 tl.id=$type_table.typeid
730 return ($from,$where,'',@args);
733 # overridden because mysql adapter uses the non-standard REPLACE syntax
736 my ($variable_name,$value) = @_;
737 my $meta = $self->_meta_table;
739 if (defined $value && $self->writeable) {
740 my $querydel = "DELETE FROM $meta WHERE name = ?";
741 my $query = "INSERT INTO $meta (name,value) VALUES (?,?)";
742 my $sthdel = $self->_prepare($querydel);
743 my $sth = $self->_prepare($query);
744 $sthdel->execute($variable_name);
745 $sth->execute($variable_name,$value) or $self->throw($sth->errstr);
747 $self->{settings_cache
}{$variable_name} = $value;
750 return $self->{settings_cache
}{$variable_name} if exists $self->{settings_cache
}{$variable_name};
751 my $query = "SELECT value FROM $meta as m WHERE m.name=?";
752 my $sth = $self->_prepare($query);
753 # $sth->execute($variable_name) or $self->throw($sth->errstr);
754 unless ($sth->execute($variable_name)) {
755 my $errstr = $sth->errstr;
756 $sth = $self->_prepare("SHOW search_path");
758 $errstr .= "With search_path " . $sth->fetchrow_arrayref->[0] . "\n";
759 $self->throw($errstr);
762 my ($value) = $sth->fetchrow_array;
764 return $self->{settings_cache
}{$variable_name} = $value;
768 # overridden because of use of REPLACE in mysql adapter
770 # Replace Bio::SeqFeatureI into database.
775 my $index_flag = shift || undef;
777 # ?? shouldn't need to do this
778 # $self->_load_class($object);
779 my $id = $object->primary_id;
780 my $features = $self->_feature_table;
782 my $query = "INSERT INTO $features (id,object,indexed,seqid,start,\"end\",strand,tier,bin,typeid) VALUES (?,?,?,?,?,?,?,?,?,?)";
783 my $query_noid = "INSERT INTO $features (object,indexed,seqid,start,\"end\",strand,tier,bin,typeid) VALUES (?,?,?,?,?,?,?,?,?)";
784 my $querydel = "DELETE FROM $features WHERE id = ?";
786 my $sthdel = $self->_prepare($querydel);
787 my $sth = $self->_prepare($query);
788 my $sth_noid = $self->_prepare($query_noid);
790 my @location = $index_flag ?
$self->_get_location_and_bin($object) : (undef)x6
;
792 my $primary_tag = $object->primary_tag;
793 my $source_tag = $object->source_tag || '';
794 $primary_tag .= ":$source_tag";
795 my $typeid = $self->_typeid($primary_tag,1);
798 $sthdel->execute($id);
799 $sth->execute($id,encode_base64
($self->freeze($object), ''),$index_flag||0,@location,$typeid) or $self->throw($sth->errstr);
801 $sth_noid->execute(encode_base64
($self->freeze($object), ''),$index_flag||0,@location,$typeid) or $self->throw($sth->errstr);
804 my $dbh = $self->dbh;
806 $object->primary_id($dbh->last_insert_id(undef, undef, undef, undef, {sequence
=>$features."_id_seq"})) unless defined $id;
808 $self->flag_for_indexing($dbh->last_insert_id(undef, undef, undef, undef, {sequence
=>$features."_id_seq"})) if $self->{bulk_update_in_progress
};
814 Usage : @type_list = $db->types
815 Function: Get all the types in the database
816 Returns : array of Bio::DB::GFF::Typename objects
822 # overridden because "offset" is reserved in postgres
824 # Insert a bit of DNA or protein into the database
# Insert a DNA/protein fragment for $seqid at the given offset.  Overridden
# because "offset" is a reserved word in PostgreSQL (double-quoted here) and
# because mysql's REPLACE is emulated with a DELETE followed by an INSERT.
# NOTE(review): fragment is missing the heredoc END terminator; no comments
# inserted after the heredoc opener.
826 sub _insert_sequence
{
828 my ($seqid,$seq,$offset) = @_;
829 my $id = $self->_locationid($seqid);
830 my $seqtable = $self->_sequence_table;
# DELETE any existing row for (id, offset) first -- stands in for REPLACE.
831 my $sthdel = $self->_prepare("DELETE FROM $seqtable WHERE id = ? AND \"offset\" = ?");
832 my $sth = $self->_prepare(<<END);
833 INSERT INTO $seqtable (id,"offset",sequence) VALUES (?,?,?)
835 $sthdel->execute($id,$offset);
836 $sth->execute($id,$offset,$seq) or $self->throw($sth->errstr);
839 # overridden because of mysql adapter's use of REPLACE
841 # This subroutine flags the given primary ID for later reindexing
# Flag the given primary ID for later reindexing by inserting it into the
# needs-updating table.  Overridden because the mysql adapter uses REPLACE;
# here a DELETE followed by an INSERT gives the same upsert effect.
# NOTE(review): fragment is missing the "my ($self,$id) = @_" line and
# closing brace; code left byte-identical, comments only.
843 sub flag_for_indexing
{
846 my $needs_updating = $self->_update_table;
848 my $querydel = "DELETE FROM $needs_updating WHERE id = ?";
849 my $query = "INSERT INTO $needs_updating VALUES (?)";
850 my $sthdel = $self->_prepare($querydel);
851 my $sth = $self->_prepare($query);
853 $sthdel->execute($id);
854 $sth->execute($id) or $self->throw($self->dbh->errstr);
857 # overridden because of the different ways that mysql and postgres
858 # handle id sequences
861 my ($table,$namefield,$name,$add_if_missing) = @_;
862 my $qualified_table = $self->_qualify($table);
863 my $sth = $self->_prepare(<<END);
864 SELECT id FROM $qualified_table WHERE $namefield=?
866 $sth->execute($name) or die $sth->errstr;
867 my ($id) = $sth->fetchrow_array;
869 return $id if defined $id;
870 return unless $add_if_missing;
872 $sth = $self->_prepare(<<END);
873 INSERT INTO $qualified_table ($namefield) VALUES (?)
875 $sth->execute($name) or die $sth->errstr;
876 my $dbh = $self->dbh;
877 return $dbh->last_insert_id(undef, undef, undef, undef, {sequence
=>$qualified_table."_id_seq"});
880 # overridden because of differences in binding between mysql and postgres adapters
881 # given a statement handler that is expected to return rows of (id,object)
882 # unthaw each object and return a list of 'em
888 $sth->bind_col(1, \
$id);
889 $sth->bind_col(2, \
$o, { pg_type
=> PG_BYTEA
});
890 #while (my ($id,$o) = $sth->fetchrow_array) {
891 while ($sth->fetch) {
892 my $obj = $self->thaw(decode_base64
($o) ,$id);
899 # given a statement handler that is expected to return rows of (id,object)
900 # unthaw each object and return a list of 'em
904 my ($id,$o) = $sth->fetchrow_array;
906 my $obj = $self->thaw(decode_base64
($o) ,$id);
910 ####################################################################################################
911 # SQL Fragment generators
912 ####################################################################################################
914 # overridden because of base64 encoding needed here
916 # special-purpose store for bulk loading - write to a file rather than to the db
923 my $store_fh = $self->dump_filehandle('feature');
924 my $dbh = $self->dbh;
926 my $autoindex = $self->autoindex;
929 my $id = $self->next_id;
930 my ($seqid,$start,$end,$strand,$tier,$bin) = $indexed ?
$self->_get_location_and_bin($obj) : (undef)x6
;
931 my $primary_tag = $obj->primary_tag;
932 my $source_tag = $obj->source_tag || '';
933 $primary_tag .= ":$source_tag";
934 my $typeid = $self->_typeid($primary_tag,1);
936 my $frozen_object = encode_base64
($self->freeze($obj), '');
937 # TODO: Fix this, why does frozen object start with quote but not end with one
938 print $store_fh join("\t",$id,$typeid,$seqid,$start,$end,$strand,$tier,$bin,$indexed,$frozen_object),"\n";
939 $obj->primary_id($id);
940 $self->_update_indexes($obj) if $indexed && $autoindex;
944 # remember whether we have ever stored a non-indexed feature
945 unless ($indexed or $self->{indexed_flag
}++) {
946 $self->subfeatures_are_indexed(0);
# No-op on PostgreSQL: there is no equivalent of MySQL's
# ALTER TABLE ... ENABLE KEYS, so bulk loading needs no key toggling here.
# (Fix: removed a stray extraction artifact token that made the sub a
# syntax error; behavior is the original empty sub.)
sub _enable_keys { }
# No-op on PostgreSQL: there is no equivalent of MySQL's
# ALTER TABLE ... DISABLE KEYS, so bulk loading needs no key toggling here.
# (Fix: removed a stray extraction artifact token that made the sub a
# syntax error; behavior is the original empty sub.)
sub _disable_keys { }
# Create the interval_stats table if it does not already exist in the current
# namespace; returns without action when it is present.  Probes pg_tables
# because older Pg lacks CREATE TABLE IF NOT EXISTS.
954 sub _add_interval_stats_table
{
956 my $tables = $self->table_definitions;
957 my $interval_stats = $self->_interval_stats_table;
958 ##check to see if it exists yet; if it does, just return because
959 ##there is a DROP statement in the next step
960 my $dbh = $self->dbh;
961 my @table_exists = $dbh->selectrow_array("SELECT * FROM pg_tables WHERE tablename
962 = '$interval_stats' AND schemaname = '".$self->namespace."'");
963 if (!scalar(@table_exists)) {
964 my $query = "CREATE TABLE $interval_stats $tables->{interval_stats}";
965 $dbh->do($query) or $self->throw($dbh->errstr);
969 sub _fetch_indexed_features_sql
{
971 my $features = $self->_feature_table;
973 SELECT typeid,seqid,start-1,"end"
976 ORDER BY typeid,seqid,start