2 package Bio
::DB
::SeqFeature
::Store
::DBI
::Pg
;
3 use DBD
::Pg
qw(:pg_types);
5 # $Id: Pg.pm 14656 2008-04-14 15:05:37Z lstein $
9 Bio::DB::SeqFeature::Store::DBI::Pg -- PostgreSQL implementation of Bio::DB::SeqFeature::Store
13 use Bio::DB::SeqFeature::Store;
15 # Open the sequence database
16 my $db = Bio::DB::SeqFeature::Store->new(-adaptor => 'DBI::Pg',
17 -dsn => 'dbi:Pg:test');
19 # get a feature from somewhere
20 my $feature = Bio::SeqFeature::Generic->new(...);
23 $db->store($feature) or die "Couldn't store!";
25 # primary ID of the feature is changed to indicate it is stored in the database
27 my $id = $feature->primary_id;
29 # get the feature back out
30 my $f = $db->fetch($id);
32 # change the feature and update it
34 $db->update($f) or die "Couldn't update!";
38 my @features = $db->fetch_many(@list_of_ids);
41 @features = $db->get_features_by_name('ZK909');
44 @features = $db->get_features_by_alias('sma-3');
47 @features = $db->get_features_by_type('gene');
50 @features = $db->get_features_by_location(-seq_id=>'Chr1',-start=>4000,-end=>600000);
53 @features = $db->get_features_by_attribute({description => 'protein kinase'})
55 # ...by the GFF "Note" field
56 @result_list = $db->search_notes('kinase');
58 # ...by arbitrary combinations of selectors
59 @features = $db->features(-name => $name,
64 -attributes => $attributes);
66 # ...using an iterator
67 my $iterator = $db->get_seq_stream(-name => $name,
72 -attributes => $attributes);
74 while (my $feature = $iterator->next_seq) {
75 # do something with the feature
78 # ...limiting the search to a particular region
79 my $segment = $db->segment('Chr1',5000=>6000);
80 my @features = $segment->features(-type=>['mRNA','match']);
82 # getting & storing sequence information
83 # Warning: this returns a string, and not a PrimarySeq object
84 $db->insert_sequence('Chr1','GATCCCCCGGGATTCCAAAA...');
85 my $sequence = $db->fetch_sequence('Chr1',5000=>6000);
87 # what feature types are defined in the database?
88 my @types = $db->types;
90 # create a new feature in the database
91 my $feature = $db->new_feature(-primary_tag => 'mRNA',
98 Bio::DB::SeqFeature::Store::DBI::Pg is the Pg adaptor for
99 Bio::DB::SeqFeature::Store. You will not create it directly, but
100 instead use Bio::DB::SeqFeature::Store-E<gt>new() to do so.
102 See L<Bio::DB::SeqFeature::Store> for complete usage instructions.
104 =head2 Using the Pg adaptor
106 Before you can use the adaptor, you must use the Pgadmin tool to
107 create a database and establish a user account with write
108 permission. In order to use "fast" loading, the user account must have
111 To establish a connection to the database, call
112 Bio::DB::SeqFeature::Store-E<gt>new(-adaptor=E<gt>'DBI::Pg',@more_args). The
113 additional arguments are as follows:
115 Argument name Description
116 ------------- -----------
118 -dsn The database name. You can abbreviate
119 "dbi:Pg:foo" as "foo" if you wish.
121 -user Username for authentication.
123 -pass Password for authentication.
125 -namespace Creates a SCHEMA for the tables. This allows you
126 to have several virtual databases in the same
129 -temp Boolean flag. If true, a temporary database
130 will be created and destroyed as soon as
131 the Store object goes out of scope. (synonym -temporary)
133 -autoindex Boolean flag. If true, features in the database will be
134 reindexed every time they change. This is the default.
137 -tmpdir Directory in which to place temporary files during "fast" loading.
138 Defaults to File::Spec->tmpdir(). (synonyms -dump_dir, -dumpdir, -tmp)
140 -dbi_options A hashref to pass to DBI->connect's 4th argument, the "attributes."
141 (synonyms -options, -dbi_attr)
143 -write Pass true to open database for writing or updating.
145 If successful, a new instance of
146 Bio::DB::SeqFeature::Store::DBI::Pg will be returned.
148 In addition to the standard methods supported by all well-behaved
149 Bio::DB::SeqFeature::Store databases, the following
150 adaptor-specific methods are provided. These are described in the next
157 use base
'Bio::DB::SeqFeature::Store::DBI::mysql';
158 use Bio
::DB
::SeqFeature
::Store
::DBI
::Iterator
;
162 use Bio
::DB
::GFF
::Util
::Rearrange
'rearrange';
164 use constant DEBUG
=>0;
166 use constant MAX_INT
=> 2_147_483_647
;
167 use constant MIN_INT
=> -2_147_483_648
;
168 use constant MAX_BIN
=> 1_000_000_000
; # size of largest feature = 1 Gb
169 use constant MIN_BIN
=> 1000; # smallest bin we'll make - on a 100 Mb chromosome, there'll be 100,000 of these
172 # object initialization
174 # NOTE: most of this code can be refactored and inherited from DBI or DBI::mysql adapter
189 ) = rearrange
(['DSN',
190 ['TEMP','TEMPORARY'],
193 ['DUMP_DIR','DUMPDIR','TMP','TMPDIR'],
195 ['PASS','PASSWD','PASSWORD'],
196 ['OPTIONS','DBI_OPTIONS','DBI_ATTR'],
197 ['WRITE','WRITEABLE'],
203 $dbi_options ||= {pg_server_prepare
=> 0};
204 $writeable = 1 if $is_temporary or $dump_dir;
206 $dsn or $self->throw("Usage: ".__PACKAGE__
."->init(-dsn => \$dbh || \$dsn)");
212 $dsn = "dbi:Pg:$dsn" unless $dsn =~ /^dbi:/;
213 $dbh = DBI
->connect($dsn,$user,$pass,$dbi_options) or $self->throw($DBI::errstr
);
215 $dbh->do('set client_min_messages=warning') if $dbh;
216 $self->{'original_arguments'} = {
220 'dbh_options' => $dbi_options,
223 $self->{dbh
}->{InactiveDestroy
} = 1;
224 $self->{is_temp
} = $is_temporary;
225 $self->{writeable
} = $writeable;
226 $self->{namespace
} = $namespace || $schema || 'public';
227 $self->schema($self->{namespace
});
229 $self->default_settings;
230 $self->autoindex($autoindex) if defined $autoindex;
231 $self->dumpdir($dump_dir) if $dump_dir;
232 if ($self->is_temp) {
233 # warn "creating a temp database isn't supported";
234 #$self->init_tmp_database();
235 $self->init_database('erase');
237 $self->init_database('erase');
241 sub table_definitions
{
246 id serial primary key,
251 strand int default 0,
254 indexed int default 1,
255 object bytea not null
257 CREATE INDEX feature_stuff ON feature(seqid,tier,bin,typeid);
258 CREATE INDEX feature_typeid ON feature(typeid);
261 locationlist
=> <<END,
263 id serial primary key,
264 seqname text not null
265 ); CREATE INDEX locationlist_seqname ON locationlist(seqname);
270 id serial primary key,
272 ); CREATE INDEX typelist_tab ON typelist(tag);
278 display_name int default 0
280 CREATE INDEX name_id ON name( id );
281 CREATE INDEX name_name ON name( name );
282 CREATE INDEX name_lcname ON name( lower(name) );
283 CREATE INDEX name_lcname_varchar_patt_ops ON name USING BTREE (lower(name) varchar_pattern_ops);
289 attribute_id int not null,
292 CREATE INDEX attribute_id ON attribute(id);
293 CREATE INDEX attribute_id_val ON attribute(attribute_id,SUBSTR(attribute_value, 1, 10));
296 attributelist
=> <<END,
298 id serial primary key,
301 CREATE INDEX attributelist_tag ON attributelist(tag);
303 parent2child
=> <<END,
308 CREATE UNIQUE INDEX parent2child_id_child ON parent2child(id,child);
313 name text primary key,
320 "offset" int not null,
322 primary key(id,"offset")
326 interval_stats
=> <<END,
331 cum_count int not null
333 CREATE UNIQUE INDEX interval_stats_id ON interval_stats(typeid,seqid,bin);
339 my ($self, $schema) = @_;
340 $self->{'schema'} = $schema if defined($schema);
342 $self->_check_for_namespace();
343 $self->dbh->do("SET search_path TO " . $self->{'schema'} );
345 $self->dbh->do("SET search_path TO public");
347 return $self->{'schema'};
350 # wipe database clean and reinstall schema
356 my $dbh = $self->dbh;
357 my $namespace = $self->namespace;
358 my $tables = $self->table_definitions;
359 my $temporary = $self->is_temp ?
'TEMPORARY' : '';
360 foreach (keys %$tables) {
361 next if $_ eq 'meta'; # don't get rid of meta data!
362 my $table = $self->_qualify($_);
363 $dbh->do("DROP TABLE IF EXISTS $table") if $erase;
364 my @table_exists = $dbh->selectrow_array("SELECT * FROM pg_tables WHERE tablename = '$table' AND schemaname = '$self->namespace'");
365 if (!scalar(@table_exists)) {
366 my $query = "CREATE $temporary TABLE $table $tables->{$_}";
367 $dbh->do($query) or $self->throw($dbh->errstr);
370 $self->subfeatures_are_indexed(1) if $erase;
374 sub maybe_create_meta
{
376 return unless $self->writeable;
377 my $namespace = $self->namespace;
378 my $table = $self->_qualify('meta');
379 my $tables = $self->table_definitions;
380 my $temporary = $self->is_temp ?
'TEMPORARY' : '';
381 my @table_exists = $self->dbh->selectrow_array("SELECT * FROM pg_tables WHERE tablename = 'meta' AND schemaname = '$namespace'");
382 $self->dbh->do("CREATE $temporary TABLE $table $tables->{meta}")
383 unless @table_exists;
387 # check if the namespace/schema exists, if not create it
390 sub _check_for_namespace
{
392 my $namespace = $self->namespace;
393 return if $namespace eq 'public';
394 my $dbh = $self->dbh;
395 my @schema_exists = $dbh->selectrow_array("SELECT * FROM pg_namespace WHERE nspname = '$namespace'");
396 if (!scalar(@schema_exists)) {
397 my $query = "CREATE SCHEMA $namespace";
398 $dbh->do($query) or $self->throw($dbh->errstr);
400 # if temp parameter is set and schema created for this process then enable removal in remove_namespace()
401 if ($self->is_temp) {
402 $self->{delete_schema
} = 1;
408 # Overiding inherited mysql _qualify (We do not need to qualify for PostgreSQL as we have set the search_path above)
412 my $table_name = shift;
417 # when is_temp is set and the schema did not exist beforehand then we are able to remove it
419 sub remove_namespace
{
421 if ($self->{delete_schema
}) {
422 my $namespace = $self->namespace;
423 $self->dbh->do("DROP SCHEMA $namespace") or $self->throw($self->dbh->errstr);
427 ####Overiding the inherited mysql function _prepare
432 my $dbh = $self->dbh;
433 my $schema = $self->{namespace
};
436 $self->_check_for_namespace();
437 $dbh->do("SET search_path TO " . $self->{'schema'} );
439 $dbh->do("SET search_path TO public");
441 my $sth = $dbh->prepare_cached($query, {}, 3) or
442 $self->throw($dbh->errstr);
446 sub _finish_bulk_update
{
448 my $dbh = $self->dbh;
449 my $dir = $self->{dumpdir
} || '.';
450 for my $table ('feature',$self->index_tables) {
451 my $fh = $self->dump_filehandle($table);
452 my $path = $self->dump_path($table);
454 my $qualified_table = $self->_qualify($table);
455 system "cp $path $path.bak";
456 # Get stuff from file into STDIN so we don't have to be superuser
457 open my $FH, '<', $path or $self->throw("Could not read file '$path': $!");
458 print STDERR
"Loading file $path\n";
459 $dbh->do("COPY $qualified_table FROM STDIN CSV QUOTE '''' DELIMITER '\t'") or $self->throw($dbh->errstr);
460 while (my $line = <$FH>) {
461 $dbh->pg_putline($line);
463 $dbh->pg_endcopy() or $self->throw($dbh->errstr);
467 delete $self->{bulk_update_in_progress
};
468 delete $self->{filehandles
};
473 # Add a subparts to a feature. Both feature and all subparts must already be in database.
475 sub _add_SeqFeature
{
478 # special purpose method for case when we are doing a bulk update
479 return $self->_dump_add_SeqFeature(@_) if $self->{bulk_update_in_progress
};
484 my $dbh = $self->dbh;
485 local $dbh->{RaiseError
} = 1;
487 my $child_table = $self->_parent2child_table();
490 my $querydel = "DELETE FROM $child_table WHERE id = ? AND child = ?";
491 my $query = "INSERT INTO $child_table (id,child) VALUES (?,?)";
492 my $sthdel = $self->_prepare($querydel);
493 my $sth = $self->_prepare($query);
495 my $parent_id = (ref $parent ?
$parent->primary_id : $parent)
496 or $self->throw("$parent should have a primary_id");
498 $self->begin_work or $self->throw($dbh->errstr);
500 for my $child (@children) {
501 my $child_id = ref $child ?
$child->primary_id : $child;
502 defined $child_id or die "no primary ID known for $child";
503 $sthdel->execute($parent_id, $child_id);
504 $sth->execute($parent_id,$child_id);
510 warn "Transaction aborted because $@";
520 # because this is a reserved word in postgresql
522 # get primary sequence between start and end
524 sub _fetch_sequence
{
526 my ($seqid,$start,$end) = @_;
528 # backward compatibility to the old days when I liked reverse complementing
529 # dna by specifying $start > $end
531 if (defined $start && defined $end && $start > $end) {
533 ($start,$end) = ($end,$start);
535 $start-- if defined $start;
536 $end-- if defined $end;
538 my $offset1 = $self->_offset_boundary($seqid,$start || 'left');
539 my $offset2 = $self->_offset_boundary($seqid,$end || 'right');
540 my $sequence_table = $self->_sequence_table;
541 my $locationlist_table = $self->_locationlist_table;
543 my $sth = $self->_prepare(<<END);
544 SELECT sequence,"offset"
545 FROM $sequence_table as s,$locationlist_table as ll
554 $sth->execute($seqid,$offset1,$offset2) or $self->throw($sth->errstr);
556 while (my($frag,$offset) = $sth->fetchrow_array) {
557 substr($frag,0,$start-$offset) = '' if defined $start && $start > $offset;
560 substr($seq,$end-$start+1) = '' if defined $end && $end-$start+1 < length($seq);
563 $seq =~ tr/gatcGATC/ctagCTAG/;
569 sub _offset_boundary
{
571 my ($seqid,$position) = @_;
573 my $sequence_table = $self->_sequence_table;
574 my $locationlist_table = $self->_locationlist_table;
577 $sql = $position eq 'left' ?
"SELECT min(\"offset\") FROM $sequence_table as s,$locationlist_table as ll WHERE s.id=ll.id AND ll.seqname=?"
578 :$position eq 'right' ?
"SELECT max(\"offset\") FROM $sequence_table as s,$locationlist_table as ll WHERE s.id=ll.id AND ll.seqname=?"
579 :"SELECT max(\"offset\") FROM $sequence_table as s,$locationlist_table as ll WHERE s.id=ll.id AND ll.seqname=? AND \"offset\"<=?";
581 my $sth = $self->_prepare($sql);
582 my @args = $position =~ /^-?\d+$/ ?
($seqid,$position) : ($seqid);
583 $sth->execute(@args) or $self->throw($sth->errstr);
584 my $boundary = $sth->fetchall_arrayref->[0][0];
591 my ($name,$allow_aliases,$join) = @_;
592 my $name_table = $self->_name_table;
594 my $from = "$name_table as n";
595 my ($match,$string) = $self->_match_sql($name);
597 my $where = "n.id=$join AND lower(n.name) $match";
598 $where .= " AND n.display_name>0" unless $allow_aliases;
599 return ($from,$where,'',$string);
602 sub _search_attributes
{
604 my ($search_string,$attribute_names,$limit) = @_;
605 my @words = map {quotemeta($_)} split /\s+/,$search_string;
606 my $name_table = $self->_name_table;
607 my $attribute_table = $self->_attribute_table;
608 my $attributelist_table = $self->_attributelist_table;
609 my $type_table = $self->_type_table;
610 my $typelist_table = $self->_typelist_table;
612 my @tags = @
$attribute_names;
613 my $tag_sql = join ' OR ',("al.tag=?") x
@tags;
615 my $perl_regexp = join '|',@words;
617 my @wild_card_words = map { "%$_%" } @words;
618 my $sql_regexp = join ' OR ',("a.attribute_value SIMILAR TO ?") x
@words;
620 SELECT name,attribute_value,tl.tag,n.id
621 FROM $name_table as n,$attribute_table as a,$attributelist_table as al,$type_table as t,$typelist_table as tl
623 AND al.id=a.attribute_id
630 $sql .= "LIMIT $limit" if defined $limit;
631 $self->_print_query($sql,@tags,@wild_card_words) if DEBUG
|| $self->debug;
632 my $sth = $self->_prepare($sql);
633 $sth->execute(@tags,@wild_card_words) or $self->throw($sth->errstr);
636 while (my($name,$value,$type,$id) = $sth->fetchrow_array) {
637 my (@hits) = $value =~ /$perl_regexp/ig;
638 my @words_in_row = split /\b/,$value;
639 my $score = int(@hits*100/@words/@words_in_row);
640 push @results,[$name,$value,$score,$type,$id];
643 @results = sort {$b->[2]<=>$a->[2]} @results;
647 # overridden here because the mysql adapter uses
648 # a non-standard query hint
649 sub _attributes_sql
{
651 my ($attributes,$join) = @_;
653 my ($wf,@bind_args) = $self->_make_attribute_where('a','al',$attributes);
654 my ($group_by,@group_args)= $self->_make_attribute_group('a',$attributes);
656 my $attribute_table = $self->_attribute_table;
657 my $attributelist_table = $self->_attributelist_table;
659 my $from = "$attribute_table as a, $attributelist_table as al";
663 AND a.attribute_id=al.id
667 my $group = $group_by;
669 my @args = (@bind_args,@group_args);
670 return ($from,$where,$group,@args);
678 if ($name =~ /(?:^|[^\\])[*?]/) {
679 $name =~ s/(^|[^\\])([%_])/$1\\$2/g;
680 $name =~ s/(^|[^\\])\*/$1%/g;
681 $name =~ s/(^|[^\\])\?/$1_/g;
685 $match = "= lower(?)";
688 return ($match,$string);
691 # overridden because of differences between LIKE behavior in mysql and postgres
692 # as well as case-sensitivity of matches
695 my ($types,$type_table) = @_;
696 my ($primary_tag,$source_tag);
698 my @types = ref $types eq 'ARRAY' ? @
$types : $types;
700 my $typelist = $self->_typelist_table;
701 my $from = "$typelist AS tl";
705 for my $type (@types) {
707 if (ref $type && $type->isa('Bio::DB::GFF::Typename')) {
708 $primary_tag = $type->method;
709 $source_tag = $type->source;
711 ($primary_tag,$source_tag) = split ':',$type,2;
715 push @matches,"lower(tl.tag)=lower(?)";
716 push @args,"$primary_tag:$source_tag";
718 push @matches,"tl.tag ILIKE ?";
719 push @args,"$primary_tag:%";
722 my $matches = join ' OR ',@matches;
725 tl.id=$type_table.typeid
729 return ($from,$where,'',@args);
732 # overridden because mysql adapter uses the non-standard REPLACE syntax
735 my ($variable_name,$value) = @_;
736 my $meta = $self->_meta_table;
738 if (defined $value && $self->writeable) {
739 my $querydel = "DELETE FROM $meta WHERE name = ?";
740 my $query = "INSERT INTO $meta (name,value) VALUES (?,?)";
741 my $sthdel = $self->_prepare($querydel);
742 my $sth = $self->_prepare($query);
743 $sthdel->execute($variable_name);
744 $sth->execute($variable_name,$value) or $self->throw($sth->errstr);
746 $self->{settings_cache
}{$variable_name} = $value;
749 return $self->{settings_cache
}{$variable_name} if exists $self->{settings_cache
}{$variable_name};
750 my $query = "SELECT value FROM $meta as m WHERE m.name=?";
751 my $sth = $self->_prepare($query);
752 # $sth->execute($variable_name) or $self->throw($sth->errstr);
753 unless ($sth->execute($variable_name)) {
754 my $errstr = $sth->errstr;
755 $sth = $self->_prepare("SHOW search_path");
757 $errstr .= "With search_path " . $sth->fetchrow_arrayref->[0] . "\n";
758 $self->throw($errstr);
761 my ($value) = $sth->fetchrow_array;
763 return $self->{settings_cache
}{$variable_name} = $value;
767 # overridden because of use of REPLACE in mysql adapter
769 # Replace Bio::SeqFeatureI into database.
774 my $index_flag = shift || undef;
776 # ?? shouldn't need to do this
777 # $self->_load_class($object);
778 my $id = $object->primary_id;
779 my $features = $self->_feature_table;
781 my $query = "INSERT INTO $features (id,object,indexed,seqid,start,\"end\",strand,tier,bin,typeid) VALUES (?,?,?,?,?,?,?,?,?,?)";
782 my $query_noid = "INSERT INTO $features (object,indexed,seqid,start,\"end\",strand,tier,bin,typeid) VALUES (?,?,?,?,?,?,?,?,?)";
783 my $querydel = "DELETE FROM $features WHERE id = ?";
785 my $sthdel = $self->_prepare($querydel);
786 my $sth = $self->_prepare($query);
787 my $sth_noid = $self->_prepare($query_noid);
789 my @location = $index_flag ?
$self->_get_location_and_bin($object) : (undef)x6
;
791 my $primary_tag = $object->primary_tag;
792 my $source_tag = $object->source_tag || '';
793 $primary_tag .= ":$source_tag";
794 my $typeid = $self->_typeid($primary_tag,1);
797 $sthdel->execute($id);
798 $sth->execute($id,encode_base64
($self->freeze($object), ''),$index_flag||0,@location,$typeid) or $self->throw($sth->errstr);
800 $sth_noid->execute(encode_base64
($self->freeze($object), ''),$index_flag||0,@location,$typeid) or $self->throw($sth->errstr);
803 my $dbh = $self->dbh;
805 $object->primary_id($dbh->last_insert_id(undef, undef, undef, undef, {sequence
=>$features."_id_seq"})) unless defined $id;
807 $self->flag_for_indexing($dbh->last_insert_id(undef, undef, undef, undef, {sequence
=>$features."_id_seq"})) if $self->{bulk_update_in_progress
};
813 Usage : @type_list = $db->types
814 Function: Get all the types in the database
815 Returns : array of Bio::DB::GFF::Typename objects
821 # overridden because "offset" is reserved in postgres
823 # Insert a bit of DNA or protein into the database
825 sub _insert_sequence
{
827 my ($seqid,$seq,$offset) = @_;
828 my $id = $self->_locationid($seqid);
829 my $seqtable = $self->_sequence_table;
830 my $sthdel = $self->_prepare("DELETE FROM $seqtable WHERE id = ? AND \"offset\" = ?");
831 my $sth = $self->_prepare(<<END);
832 INSERT INTO $seqtable (id,"offset",sequence) VALUES (?,?,?)
834 $sthdel->execute($id,$offset);
835 $sth->execute($id,$offset,$seq) or $self->throw($sth->errstr);
838 # overridden because of mysql adapter's use of REPLACE
840 # This subroutine flags the given primary ID for later reindexing
842 sub flag_for_indexing
{
845 my $needs_updating = $self->_update_table;
847 my $querydel = "DELETE FROM $needs_updating WHERE id = ?";
848 my $query = "INSERT INTO $needs_updating VALUES (?)";
849 my $sthdel = $self->_prepare($querydel);
850 my $sth = $self->_prepare($query);
852 $sthdel->execute($id);
853 $sth->execute($id) or $self->throw($self->dbh->errstr);
856 # overridden because of the different ways that mysql and postgres
857 # handle id sequences
860 my ($table,$namefield,$name,$add_if_missing) = @_;
861 my $qualified_table = $self->_qualify($table);
862 my $sth = $self->_prepare(<<END);
863 SELECT id FROM $qualified_table WHERE $namefield=?
865 $sth->execute($name) or die $sth->errstr;
866 my ($id) = $sth->fetchrow_array;
868 return $id if defined $id;
869 return unless $add_if_missing;
871 $sth = $self->_prepare(<<END);
872 INSERT INTO $qualified_table ($namefield) VALUES (?)
874 $sth->execute($name) or die $sth->errstr;
875 my $dbh = $self->dbh;
876 return $dbh->last_insert_id(undef, undef, undef, undef, {sequence
=>$qualified_table."_id_seq"});
879 # overridden because of differences in binding between mysql and postgres adapters
880 # given a statement handler that is expected to return rows of (id,object)
881 # unthaw each object and return a list of 'em
887 $sth->bind_col(1, \
$id);
888 $sth->bind_col(2, \
$o, { pg_type
=> PG_BYTEA
});
889 #while (my ($id,$o) = $sth->fetchrow_array) {
890 while ($sth->fetch) {
891 my $obj = $self->thaw(decode_base64
($o) ,$id);
898 # given a statement handler that is expected to return rows of (id,object)
899 # unthaw each object and return a list of 'em
903 my ($id,$o) = $sth->fetchrow_array;
905 my $obj = $self->thaw(decode_base64
($o) ,$id);
909 ####################################################################################################
910 # SQL Fragment generators
911 ####################################################################################################
913 # overridden because of base64 encoding needed here
915 # special-purpose store for bulk loading - write to a file rather than to the db
922 my $store_fh = $self->dump_filehandle('feature');
923 my $dbh = $self->dbh;
925 my $autoindex = $self->autoindex;
928 my $id = $self->next_id;
929 my ($seqid,$start,$end,$strand,$tier,$bin) = $indexed ?
$self->_get_location_and_bin($obj) : (undef)x6
;
930 my $primary_tag = $obj->primary_tag;
931 my $source_tag = $obj->source_tag || '';
932 $primary_tag .= ":$source_tag";
933 my $typeid = $self->_typeid($primary_tag,1);
935 my $frozen_object = encode_base64
($self->freeze($obj), '');
936 # TODO: Fix this, why does frozen object start with quote but not end with one
937 print $store_fh join("\t",$id,$typeid,$seqid,$start,$end,$strand,$tier,$bin,$indexed,$frozen_object),"\n";
938 $obj->primary_id($id);
939 $self->_update_indexes($obj) if $indexed && $autoindex;
943 # remember whether we are have ever stored a non-indexed feature
944 unless ($indexed or $self->{indexed_flag
}++) {
945 $self->subfeatures_are_indexed(0);
# No-op override: the inherited mysql adaptor re-enables table keys after a
# bulk load, but PostgreSQL has no equivalent ALTER TABLE ... ENABLE KEYS
# step, so there is deliberately nothing to do here.
sub _enable_keys { return; }
# No-op override: the inherited mysql adaptor disables table keys before a
# bulk load for speed; PostgreSQL needs no such step, so this intentionally
# does nothing.
sub _disable_keys { return; }
953 sub _add_interval_stats_table
{
955 my $tables = $self->table_definitions;
956 my $interval_stats = $self->_interval_stats_table;
957 ##check to see if it exists yet; if it does, just return because
958 ##there is a drop from in the next step
959 my $dbh = $self->dbh;
960 my @table_exists = $dbh->selectrow_array("SELECT * FROM pg_tables WHERE tablename
961 = '$interval_stats' AND schemaname = '".$self->namespace."'");
962 if (!scalar(@table_exists)) {
963 my $query = "CREATE TABLE $interval_stats $tables->{interval_stats}";
964 $dbh->do($query) or $self->throw($dbh->errstr);
968 sub _fetch_indexed_features_sql
{
970 my $features = $self->_feature_table;
972 SELECT typeid,seqid,start-1,"end"
975 ORDER BY typeid,seqid,start