2 import os
, pickle
, sys
, re
, datetime
, UserDict
3 from StringIO
import StringIO
4 from mapping
import Collection
,Mapping
,Graph
5 from classutil
import standard_invert
,get_bound_subclass
,SourceFileName
6 from coordinator
import XMLRPCServerBase
11 except NameError: # DEFAULT LIST OF CLASSES NOT PORTABLE TO REMOTE CLIENTS
12 nonPortableClasses
= [SourceFileName
]
15 class OneTimeDescriptor(object):
16 'provides shadow attribute based on schema'
17 def __init__(self
, attrName
, mdb
, **kwargs
):
20 def __get__(self
, obj
, objtype
):
22 resID
= obj
._persistent
_id
# GET ITS RESOURCE ID
23 except AttributeError:
24 raise AttributeError('attempt to access worldbase attr on non-worldbase object')
25 target
= self
.mdb
.get_schema_attr(resID
, self
.attr
) #get from mdb
26 obj
.__dict
__[self
.attr
] = target
# save in __dict__ to evade __setattr__
29 class ItemDescriptor(object):
30 'provides shadow attribute for items in a db, based on schema'
31 def __init__(self
, attrName
, mdb
, invert
=False, getEdges
=False,
32 mapAttr
=None, targetAttr
=None, uniqueMapping
=False, **kwargs
):
36 self
.getEdges
= getEdges
37 self
.mapAttr
= mapAttr
38 self
.targetAttr
= targetAttr
39 self
.uniqueMapping
= uniqueMapping
40 def get_target(self
, obj
):
41 'return the mapping object for this schema relation'
43 resID
= obj
.db
._persistent
_id
# GET RESOURCE ID OF DATABASE
44 except AttributeError:
45 raise AttributeError('attempt to access worldbase attr on non-worldbase object')
46 targetDict
= self
.mdb
.get_schema_attr(resID
, self
.attr
)
48 targetDict
= ~targetDict
50 targetDict
= targetDict
.edges
52 def __get__(self
, obj
, objtype
):
53 targetDict
= self
.get_target(obj
)
54 if self
.mapAttr
is not None: # USE mapAttr TO GET ID FOR MAPPING obj
55 obj_id
= getattr(obj
,self
.mapAttr
)
56 if obj_id
is None: # None MAPS TO None, SO RETURN IMMEDIATELY
57 return None # DON'T BOTHER CACHING THIS
58 result
=targetDict
[obj_id
] # MAP USING THE SPECIFIED MAPPING
60 result
=targetDict
[obj
] # NOW PERFORM MAPPING IN THAT RESOURCE...
61 if self
.targetAttr
is not None:
62 result
=getattr(result
,self
.targetAttr
) # GET ATTRIBUTE OF THE result
63 obj
.__dict
__[self
.attr
]=result
# CACHE IN THE __dict__
class ItemDescriptorRW(ItemDescriptor):
    'writable variant of ItemDescriptor: permits direct assignment for unique mappings'
    def __set__(self, obj, newTarget):
        'store newTarget as the mapped value for obj and cache it on the instance'
        if not self.uniqueMapping:
            raise WorldbaseSchemaError('''You attempted to directly assign to a graph mapping
(x.graph = y)! Instead, treat the graph like a dictionary: x.graph[y] = edgeInfo''')
        mapping = self.get_target(obj)
        mapping[obj] = newTarget
        # write straight into the instance dict to bypass __setattr__
        obj.__dict__[self.attr] = newTarget
class ForwardingDescriptor(object):
    'forward an attribute request to item from another container'
    def __init__(self, targetDB, attr):
        self.targetDB = targetDB  # container whose items we delegate to
        self.attr = attr  # attribute name to fetch from the item
    def __get__(self, obj, objtype):
        'look up obj.id in targetDB, then return that item\'s attribute'
        item = self.targetDB[obj.id]
        return getattr(item, self.attr)
class SpecialMethodDescriptor(object):
    'enables shadowing of special methods like __invert__'
    def __init__(self, attrName):
        # name of the special method this descriptor shadows
        self.attr = attrName
    def __get__(self, obj, objtype):
        'return the per-instance binding for this special method, if any'
        try:
            return obj.__dict__[self.attr]
        except KeyError:
            raise AttributeError('%s has no method %s' % (obj, self.attr))
def addSpecialMethod(obj, attr, f):
    '''Bind function f as special method attr on obj.
    obj cannot be a builtin or extension class (if so, just subclass it).'''
    import new  # NOTE: Python 2 only; builds a bound instancemethod
    m = new.instancemethod(f, obj, obj.__class__)
    try:
        if getattr(obj, attr) == m:  # already bound to f
            return  # nothing further to do
    except AttributeError:
        pass  # not bound yet -- proceed to bind below
    else:
        raise AttributeError('%s already bound to a different function' %attr)
    setattr(obj, attr, m)  # save bound method to __dict__
    setattr(obj.__class__, attr, SpecialMethodDescriptor(attr))  # does forwarding
def getInverseDB(self):
    'default shadow __invert__ method'
    # attribute access triggers construction of the target resource
    return self.inverseDB
class WorldbaseNotPortableError(ValueError):
    # docstring typo fixed: "cannnot" -> "cannot"
    'indicates that object has a local data dependency and cannot be transferred to a remote client'
class WorldbaseNotFoundError(KeyError):
    'raised when no loadable resource for the requested worldbase identifier exists on WORLDBASEPATH'
class WorldbaseMismatchError(ValueError):
    "raised when an object's _persistent_id attr no longer matches its assigned worldbase ID"
class WorldbaseEmptyError(ValueError):
    'raised on save/rollback when the user has queued nothing'
class WorldbaseReadOnlyError(ValueError):
    'raised on an attempt to write data to a read-only resource database'
class WorldbaseSchemaError(ValueError):
    'raised on an attempt to set an attribute to an object not in the database bound by schema'
class WorldbaseNoModuleError(pickle.PickleError):
    'raised on an attempt to pickle a class from a module that cannot be imported'
141 class PygrPickler(pickle
.Pickler
):
142 def persistent_id(self
,obj
):
143 'convert objects with _persistent_id to PYGR_ID strings during pickling'
145 try: # check for unpicklable class (i.e. not loaded via a module import)
146 if isinstance(obj
, types
.TypeType
) and obj
.__module
__ == '__main__':
147 raise WorldbaseNoModuleError('''You cannot pickle a class from __main__!
148 To make this class (%s) picklable, it must be loaded via a regular import
149 statement.''' % obj
.__name
__)
150 except AttributeError:
153 if not isinstance(obj
,types
.TypeType
) and obj
is not self
.root
:
155 return 'PYGR_ID:%s' % self
.sourceIDs
[id(obj
)]
157 if obj
._persistent
_id
is not None:
158 return 'PYGR_ID:%s' % obj
._persistent
_id
159 except AttributeError:
161 for klass
in self
.badClasses
: # CHECK FOR LOCAL DEPENDENCIES
162 if isinstance(obj
,klass
):
163 raise WorldbaseNotPortableError('this object has a local data dependency and cannnot be transferred to a remote client')
165 def setRoot(self
,obj
,sourceIDs
={},badClasses
=()):
166 'set obj as root of pickling tree: genuinely pickle it (not just its id)'
168 self
.sourceIDs
=sourceIDs
169 self
.badClasses
= badClasses
172 class MetabaseServer(object):
173 'simple XMLRPC resource database server'
174 xmlrpc_methods
={'getResource':0,'registerServer':0,'delResource':0,
175 'getName':0,'dir':0,'get_version':0}
176 _pygr_data_version
=(0,1,0)
177 def __init__(self
,name
,readOnly
=True,downloadDB
=None):
182 self
.downloadDocs
= {}
183 if readOnly
: # LOCK THE INDEX. DON'T ACCEPT FOREIGN DATA!!
184 self
.xmlrpc_methods
={'getResource':0,'getName':0,'dir':0,
185 'get_version':0} # ONLY ALLOW THESE METHODS!
186 if downloadDB
is not None:
187 self
.read_download_db(downloadDB
)
188 def read_download_db(self
,filename
,location
='default'):
189 'add the designated resource DB shelve to our downloadable resources'
190 d
= dbfile
.shelve_open(filename
,'r')
191 for k
,v
in d
.items():
192 if k
.startswith('__doc__.'): # SAVE DOC INFO FOR THIS ID
193 self
.downloadDocs
[k
[8:]] = v
194 else: # SAVE OBJECT INFO
195 self
.downloadDB
.setdefault(k
,{})[location
] = v
198 'return layer name for this server'
200 def get_db(self
,download
):
201 if download
: # USE SEPARATE DOWNLOAD DATABASE
202 return (self
.downloadDB
, self
.downloadDocs
)
203 else: # USE REGULAR XMLRPC SERVICES DATABASE
204 return (self
.d
, self
.docs
)
205 def getResource(self
,id,download
=False):
206 'return dict of location:pickleData for requested ID'
207 db
,docs
= self
.get_db(download
)
209 d
= db
[id] # RETURN DICT OF PICKLED OBJECTS
211 return '' # EMPTY STRING INDICATES FAILURE
212 if id.startswith('SCHEMA.'): # THIS IS A REQUEST FOR SCHEMA INFO
213 for location
in d
: # -schemaEdge DATA NOT SENDABLE BY XMLRPC
215 del d
[location
]['-schemaEdge']
218 else: # THIS IS A REGULAR RESOURCE REQUEST
219 try: # PASS ITS DOCSTRING AS A SPECIAL ENTRY
220 d
['__doc__'] = docs
[id]['__doc__']
224 def registerServer(self
,locationKey
,serviceDict
):
225 'add services in serviceDict to this server under the specified location'
227 for id,(infoDict
,pdata
) in serviceDict
.items():
228 self
.d
.setdefault(id,{})[locationKey
] = pdata
# SAVE RESOURCE
229 if infoDict
is not None:
230 self
.docs
[id]=infoDict
232 return n
# COUNT OF SUCCESSFULLY REGISTERED SERVICES
233 def delResource(self
,id,locationKey
):
234 'delete the specified resource under the specified location'
236 del self
.d
[id][locationKey
]
237 if len(self
.d
[id])==0:
241 return '' # DUMMY RETURN VALUE FOR XMLRPC
242 def dir(self
, pattern
, asDict
=False, matchType
='p', download
=False):
243 'return list or dict of resources matching the specified string'
244 db
,docs
= self
.get_db(download
)
246 pattern
= re
.compile(pattern
)
248 for name
in db
: # FIND ALL ITEMS WITH MATCHING NAME
249 if matchType
== 'p' and name
.startswith(pattern
) or matchType
== 'r' and pattern
.search(name
):
251 if asDict
: # RETURN INFO DICT FOR EACH ITEM
254 d
[name
] = docs
.get(name
,{})
257 def get_version(self
):
258 return self
._pygr
_data
_version
def raise_illegal_save(self, *l):
    'stand-in for write methods on read-only interfaces: always raises'
    raise WorldbaseReadOnlyError('''You cannot save data to a remote XMLRPC server.
Give a user-editable resource database as the first entry in your WORLDBASEPATH!''')
266 class XMLRPCMetabase(object):
267 'client interface to remote XMLRPC resource database'
268 def __init__(self
, url
, mdb
, **kwargs
):
269 from coordinator
import get_connection
270 self
.server
=get_connection(url
,'index')
273 self
.zoneName
= self
.server
.getName()
274 self
.writeable
= False
275 def find_resource(self
,id,download
=False):
276 'get pickledata,docstring for this resource ID from server'
277 if download
: # SPECIFICALLY ASK SERVER FOR DOWNLOADABLE RESOURCES
278 d
= self
.server
.getResource(id,download
)
279 else: # NORMAL MODE TO GET XMLRPC SERVICES
280 d
=self
.server
.getResource(id)
282 raise WorldbaseNotFoundError('resource %s not found'%id)
284 docstring
= d
['__doc__']
288 for location
,objdata
in d
.items(): # return the first resource found
289 return objdata
, docstring
290 raise KeyError('unable to find %s from remote services' % id)
291 def registerServer(self
,locationKey
,serviceDict
):
292 'forward registration to the server'
293 return self
.server
.registerServer(locationKey
,serviceDict
)
294 def getschema(self
,id):
295 'return dict of {attr:{args}}'
296 d
=self
.server
.getResource('SCHEMA.'+id)
297 if d
=='': # NO SCHEMA INFORMATION FOUND
299 for schemaDict
in d
.values():
300 return schemaDict
# HAND BACK FIRST SCHEMA WE FIND
302 def dir(self
,pattern
,matchType
='p',asDict
=False,download
=False):
303 'return list or dict of resources matching the specified string'
305 return self
.server
.dir(pattern
, asDict
, matchType
, download
)
307 return self
.server
.dir(pattern
, asDict
, matchType
)
308 __setitem__
= raise_illegal_save
# RAISE USEFUL EXPLANATORY ERROR MESSAGE
309 __delitem__
= raise_illegal_save
310 setschema
= raise_illegal_save
311 delschema
= raise_illegal_save
315 class MySQLMetabase(object):
316 '''To create a new resource table, call:
317 MySQLMetabase("DBNAME.TABLENAME", mdb, createLayer="LAYERNAME")
318 where DBNAME is the name of your database, TABLENAME is the name of the
319 table you want to create, and LAYERNAME is the layer name you want to assign it'''
320 _pygr_data_version
=(0,1,0)
321 def __init__(self
, tablename
, mdb
, createLayer
=None, newZone
=None, **kwargs
):
322 from sqlgraph
import get_name_cursor
,SQLGraph
323 self
.tablename
,self
.cursor
,self
.serverInfo
= get_name_cursor(tablename
)
325 self
.writeable
= True
327 schemaTable
= self
.tablename
+'_schema' # SEPARATE TABLE FOR SCHEMA GRAPH
328 if createLayer
is None:
329 createLayer
= newZone
# use the new parameter
330 if createLayer
is not None: # CREATE DATABASE FROM SCRATCH
331 creation_time
= datetime
.datetime
.now()
332 self
.cursor
.execute('drop table if exists %s' % self
.tablename
)
333 self
.cursor
.execute('create table %s (pygr_id varchar(255) not null,location varchar(255) not null,docstring varchar(255),user varchar(255),creation_time datetime,pickle_size int,security_code bigint,info_blob text,objdata text not null,unique(pygr_id,location))'%self
.tablename
)
334 self
.cursor
.execute('insert into %s (pygr_id,location,creation_time,objdata) values (%%s,%%s,%%s,%%s)'
336 ('PYGRLAYERNAME',createLayer
,creation_time
,'a'))
337 self
.cursor
.execute('insert into %s (pygr_id,location,objdata) values (%%s,%%s,%%s)'
339 ('0version','%d.%d.%d' % self
._pygr
_data
_version
,
340 'a')) # SAVE VERSION STAMP
341 self
.zoneName
= createLayer
342 self
.cursor
.execute('drop table if exists %s' % schemaTable
)
343 self
.cursor
.execute('create table %s (source_id varchar(255) not null,target_id varchar(255),edge_id varchar(255),unique(source_id,target_id))' % schemaTable
)
346 n
= self
.cursor
.execute('select location from %s where pygr_id=%%s'
347 % self
.tablename
,('PYGRLAYERNAME',))
348 except StandardError:
349 print >>sys
.stderr
,'''%s
350 Database table %s appears to be missing or has no layer name!
351 To create this table, call worldbase.MySQLMetabase("%s",createLayer=<LAYERNAME>)
352 where <LAYERNAME> is the layer name you want to assign it.
353 %s''' %('!'*40,self
.tablename
,self
.tablename
,'!'*40)
356 self
.zoneName
= self
.cursor
.fetchone()[0] # GET LAYERNAME FROM DB
357 if self
.cursor
.execute('select location from %s where pygr_id=%%s'
358 % self
.tablename
,('0root',))>0:
359 for row
in self
.cursor
.fetchall():
360 self
.rootNames
[row
[0]]=None
361 mdb
.save_root_names(self
.rootNames
)
362 self
.graph
= SQLGraph(schemaTable
,self
.cursor
,attrAlias
=
363 dict(source_id
='source_id',target_id
='target_id',
364 edge_id
='edge_id'),simpleKeys
=True,
365 unpack_edge
=SchemaEdge(self
))
366 def save_root_name(self
,name
):
367 self
.rootNames
[name
]=None
368 self
.cursor
.execute('insert into %s (pygr_id,location,objdata) values (%%s,%%s,%%s)'
369 %self
.tablename
,('0root',name
,'a'))
370 def find_resource(self
,id,download
=False):
371 'get construction rule from mysql, and attempt to construct'
372 self
.cursor
.execute('select location,objdata,docstring from %s where pygr_id=%%s'
373 % self
.tablename
,(id,))
374 for location
,objdata
,docstring
in self
.cursor
.fetchall():
375 return objdata
,docstring
# return first resource found
376 raise WorldbaseNotFoundError('unable to construct %s from remote services')
377 def __setitem__(self
,id,obj
):
378 'add an object to this resource database'
379 s
= dumps(obj
) # PICKLE obj AND ITS DEPENDENCIES
380 d
= get_info_dict(obj
, s
)
381 self
.cursor
.execute('replace into %s (pygr_id,location,docstring,user,creation_time,pickle_size,objdata) values (%%s,%%s,%%s,%%s,%%s,%%s,%%s)'
383 (id,'mysql:'+self
.tablename
,obj
.__doc
__,d
['user'],
384 d
['creation_time'],d
['pickle_size'],s
))
385 root
=id.split('.')[0]
386 if root
not in self
.rootNames
:
387 self
.save_root_name(root
)
388 def __delitem__(self
,id):
389 'delete this resource and its schema rules'
390 if self
.cursor
.execute('delete from %s where pygr_id=%%s'
391 %self
.tablename
,(id,))<1:
392 raise WorldbaseNotFoundError('no resource %s in this database'%id)
393 def registerServer(self
,locationKey
,serviceDict
):
394 'register the specified services to mysql database'
396 for id,(d
,pdata
) in serviceDict
.items():
397 n
+=self
.cursor
.execute('replace into %s (pygr_id,location,docstring,user,creation_time,pickle_size,objdata) values (%%s,%%s,%%s,%%s,%%s,%%s,%%s)'
399 (id,locationKey
,d
['__doc__'],d
['user'],
400 d
['creation_time'],d
['pickle_size'],pdata
))
402 def setschema(self
,id,attr
,kwargs
):
403 'save a schema binding for id.attr --> targetID'
404 if not attr
.startswith('-'): # REAL ATTRIBUTE
405 targetID
=kwargs
['targetID'] # RAISES KeyError IF NOT PRESENT
406 kwdata
= dumps(kwargs
)
407 self
.cursor
.execute('replace into %s (pygr_id,location,objdata) values (%%s,%%s,%%s)'
408 %self
.tablename
,('SCHEMA.'+id,attr
,kwdata
))
409 def delschema(self
,id,attr
):
410 'delete schema binding for id.attr'
411 self
.cursor
.execute('delete from %s where pygr_id=%%s and location=%%s'
412 %self
.tablename
,('SCHEMA.'+id,attr
))
413 def getschema(self
,id):
414 'return dict of {attr:{args}}'
416 self
.cursor
.execute('select location,objdata from %s where pygr_id=%%s'
417 % self
.tablename
,('SCHEMA.'+id,))
418 for attr
,objData
in self
.cursor
.fetchall():
419 d
[attr
]=self
.mdb
.loads(objData
)
421 def dir(self
,pattern
,matchType
='p',asDict
=False,download
=False):
422 'return list or dict of resources matching the specified string'
425 self
.cursor
.execute('select pygr_id,docstring,user,creation_time,pickle_size from %s where pygr_id regexp %%s'
426 % self
.tablename
, (pattern
, ))
427 elif matchType
== 'p':
428 self
.cursor
.execute('select pygr_id,docstring,user,creation_time,pickle_size from %s where pygr_id like %%s'
429 % self
.tablename
,(pattern
+'%',))
431 # Exit now to avoid fetching rows with no query executed
438 for l
in self
.cursor
.fetchall():
439 d
[l
[0]] = dict(__doc__
=l
[1],user
=l
[2],creation_time
=l
[3],pickle_size
=l
[4])
443 return [name
for name
in d
]
class SchemaEdge(object):
    'provides unpack_edge method for schema graph storage'
    def __init__(self, schemaDB):
        self.schemaDB = schemaDB  # metabase queried for schema info
    def __call__(self, edgeID):
        'get the actual schema object describing this ID'
        schema = self.schemaDB.getschema(edgeID)
        return schema['-schemaEdge']
454 class ResourceDBGraphDescr(object):
455 'this property provides graph interface to schema'
456 def __get__(self
,obj
,objtype
):
457 g
= Graph(filename
=obj
.dbpath
+'_schema',mode
='cw',writeNow
=True,
458 simpleKeys
=True,unpack_edge
=SchemaEdge(obj
))
462 class ShelveMetabase(object):
463 '''BerkeleyDB-based storage of worldbase resource databases, using the python
464 shelve module. Users will not need to create instances of this class themselves,
465 as worldbase automatically creates one for each appropriate entry in your
466 WORLDBASEPATH; if the corresponding database file does not already exist,
467 it is automatically created for you.'''
468 _pygr_data_version
=(0,1,0)
469 graph
= ResourceDBGraphDescr() # INTERFACE TO SCHEMA GRAPH
470 def __init__(self
, dbpath
, mdb
, mode
='r', newZone
=None, **kwargs
):
472 self
.dbpath
= os
.path
.join(dbpath
, '.pygr_data') # CONSTRUCT FILENAME
474 self
.writeable
= True # can write to this storage
476 try: # OPEN DATABASE FOR READING
477 self
.db
= dbfile
.shelve_open(self
.dbpath
, mode
)
479 mdb
.save_root_names(self
.db
['0root'])
483 self
.zoneName
= self
.db
['0zoneName']
486 except anydbm
.error
: # CREATE NEW FILE IF NEEDED
487 self
.db
= dbfile
.shelve_open(self
.dbpath
, 'c')
488 self
.db
['0version'] = self
._pygr
_data
_version
# SAVE VERSION STAMP
489 self
.db
['0root'] = {}
490 if newZone
is not None:
491 self
.db
['0zoneName'] = newZone
492 self
.zoneName
= newZone
493 def reopen(self
, mode
):
495 self
.db
= dbfile
.shelve_open(self
.dbpath
, mode
)
496 def find_resource(self
, resID
, download
=False):
497 'get an item from this resource database'
498 objdata
= self
.db
[resID
] # RAISES KeyError IF NOT PRESENT
500 return objdata
, self
.db
['__doc__.' + resID
]['__doc__']
503 def __setitem__(self
, resID
, obj
):
504 'add an object to this resource database'
505 s
= dumps(obj
) # PICKLE obj AND ITS DEPENDENCIES
506 self
.reopen('w') # OPEN BRIEFLY IN WRITE MODE
508 self
.db
[resID
] = s
# SAVE TO OUR SHELVE FILE
509 self
.db
['__doc__.' + resID
] = get_info_dict(obj
, s
)
510 root
= resID
.split('.')[0] # SEE IF ROOT NAME IS IN THIS SHELVE
511 d
= self
.db
.get('0root', {})
513 d
[root
] = None # ADD NEW ENTRY
514 self
.db
['0root'] = d
# SAVE BACK TO SHELVE
516 self
.reopen('r') # REOPEN READ-ONLY
517 def __delitem__(self
, resID
):
518 'delete this item from the database, with a modicum of safety'
519 self
.reopen('w') # OPEN BRIEFLY IN WRITE MODE
522 del self
.db
[resID
] # DELETE THE SPECIFIED RULE
524 raise WorldbaseNotFoundError('ID %s not found in %s'
525 % (resID
,self
.dbpath
))
527 del self
.db
['__doc__.' + resID
]
531 self
.reopen('r') # REOPEN READ-ONLY
532 def dir(self
, pattern
, matchType
='p',asDict
=False,download
=False):
533 'generate all item IDs matching the specified pattern'
535 pattern
= re
.compile(pattern
)
538 if matchType
== 'p' and name
.startswith(pattern
) \
539 or matchType
== 'r' and pattern
.search(name
):
544 d
[name
] = self
.db
.get('__doc__.'+name
,None)
547 def setschema(self
, resID
, attr
, kwargs
):
548 'save a schema binding for resID.attr --> targetID'
549 if not attr
.startswith('-'): # REAL ATTRIBUTE
550 targetID
= kwargs
['targetID'] # RAISES KeyError IF NOT PRESENT
551 self
.reopen('w') # OPEN BRIEFLY IN WRITE MODE
552 d
= self
.db
.get('SCHEMA.' + resID
, {})
553 d
[attr
] = kwargs
# SAVE THIS SCHEMA RULE
554 self
.db
['SCHEMA.' + resID
] = d
# FORCE shelve TO RESAVE BACK
555 self
.reopen('r') # REOPEN READ-ONLY
556 def getschema(self
, resID
):
557 'return dict of {attr:{args}}'
558 return self
.db
['SCHEMA.' + resID
]
559 def delschema(self
, resID
, attr
):
560 'delete schema binding for resID.attr'
561 self
.reopen('w') # OPEN BRIEFLY IN WRITE MODE
562 d
=self
.db
['SCHEMA.' + resID
]
564 self
.db
['SCHEMA.' + resID
] = d
# FORCE shelve TO RESAVE BACK
565 self
.reopen('r') # REOPEN READ-ONLY
567 'close the shelve file when finished'
def dumps(obj, **kwargs):
    'pickle to string, using persistent ID encoding'
    src = StringIO()
    # our own pickler class, so persistent_id encoding is applied
    pickler = PygrPickler(src)
    pickler.setRoot(obj, **kwargs)  # root of tree: pickle even if it has a persistent_id
    pickler.dump(obj)
    return src.getvalue()  # the pickled form as a string
def get_info_dict(obj, pickleString):
    '''get dict of standard info about a resource: creation_time,
    pickle_size, __doc__, and user (None when $USER is unset).'''
    # os.environ.get replaces the original try/except KeyError dance
    return dict(creation_time=datetime.datetime.now(),
                pickle_size=len(pickleString), __doc__=obj.__doc__,
                user=os.environ.get('USER'))
591 class MetabaseBase(object):
592 def persistent_load(self
, persid
):
593 'check for PYGR_ID:... format and return the requested object'
594 if persid
.startswith('PYGR_ID:'):
595 return self(persid
[8:]) # RUN OUR STANDARD RESOURCE REQUEST PROCESS
596 else: # UNKNOWN PERSISTENT ID... NOT FROM PYGR!
597 raise pickle
.UnpicklingError
, 'Invalid persistent ID %s' % persid
598 def load(self
, resID
, objdata
, docstring
):
599 'load the pickled data and all its dependencies'
600 obj
= self
.loads(objdata
)
601 obj
.__doc
__ = docstring
602 if hasattr(obj
,'_saveLocalBuild') and obj
._saveLocalBuild
:
603 saver
= self
.writer
.saver
# mdb in which to record local copy
604 # SAVE AUTO BUILT RESOURCE TO LOCAL PYGR.DATA
605 hasPending
= saver
.has_pending() # any pending transaction?
606 saver
.add_resource(resID
, obj
) # add to queue for commit
607 obj
._saveLocalBuild
= False # NO NEED TO SAVE THIS AGAIN
609 print >>sys
.stderr
,'''Saving new resource %s to local worldbase...
610 You must use worldbase.commit() to commit!
611 You are seeing this message because you appear to be in the
612 middle of a worldbase transaction. Ordinarily worldbase would
613 automatically commit this new downloaded resource, but doing so
614 now would also commit your pending transaction, which you may
615 not be ready to do!''' % resID
616 else: # automatically save new resource
617 saver
.save_pending() # commit it
619 obj
._persistent
_id
= resID
# MARK WITH ITS PERSISTENT ID
620 self
.resourceCache
[resID
] = obj
# SAVE TO OUR CACHE
621 self
.bind_schema(resID
, obj
) # BIND SHADOW ATTRIBUTES IF ANY
623 def loads(self
, data
):
624 'unpickle from string, using persistent ID expansion'
626 unpickler
= pickle
.Unpickler(src
)
627 unpickler
.persistent_load
= self
.persistent_load
# WE PROVIDE PERSISTENT LOOKUP
628 obj
= unpickler
.load() # ACTUALLY UNPICKLE THE DATA
630 def __call__(self
, resID
, debug
=None, download
=None, *args
, **kwargs
):
631 'get the requested resource ID by searching all databases'
633 return self
.resourceCache
[resID
] # USE OUR CACHED OBJECT
636 debug_state
= self
.debug
# SAVE ORIGINAL STATE
637 download_state
= self
.download
638 if debug
is not None:
640 if download
is not None: # apply the specified download mode
641 self
.download
= download
642 else: # just use our current download mode
643 download
= self
.download
644 try: # finally... TO RESTORE debug STATE EVEN IF EXCEPTION OCCURS.
645 self
.update(debug
=self
.debug
, keepCurrentPath
=True) # load if empty
646 for objdata
,docstr
in self
.find_resource(resID
, download
):
648 obj
= self
.load(resID
, objdata
, docstr
)
650 except (KeyError,IOError): # NOT IN THIS DB; FILES NOT ACCESSIBLE...
651 if self
.debug
: # PASS ON THE ACTUAL ERROR IMMEDIATELY
653 finally: # RESTORE STATE BEFORE RAISING ANY EXCEPTION
654 self
.debug
= debug_state
655 self
.download
= download_state
656 self
.resourceCache
[resID
] = obj
# save to our cache
658 def bind_schema(self
, resID
, obj
):
659 'if this resource ID has any schema, bind its attrs to class'
661 schema
= self
.getschema(resID
)
663 return # NO SCHEMA FOR THIS OBJ, SO NOTHING TO DO
664 self
.resourceCache
.schemaCache
[resID
] = schema
# cache for speed
665 for attr
,rules
in schema
.items():
666 if not attr
.startswith('-'): # only bind real attributes
667 self
.bind_property(obj
, attr
, **rules
)
668 def bind_property(self
, obj
, attr
, itemRule
=False, **kwargs
):
669 'create a descriptor for the attr on the appropriate obj class'
670 try: # SEE IF OBJECT TELLS US TO SKIP THIS ATTRIBUTE
671 return obj
._ignoreShadowAttr
[attr
] # IF PRESENT, NOTHING TO DO
672 except (AttributeError,KeyError):
673 pass # PROCEED AS NORMAL
674 if itemRule
: # SHOULD BIND TO ITEMS FROM obj DATABASE
675 targetClass
= get_bound_subclass(obj
,'itemClass') # CLASS USED FOR CONSTRUCTING ITEMS
676 descr
= ItemDescriptor(attr
, self
, **kwargs
)
677 else: # SHOULD BIND DIRECTLY TO obj VIA ITS CLASS
678 targetClass
= get_bound_subclass(obj
)
679 descr
= OneTimeDescriptor(attr
, self
, **kwargs
)
680 setattr(targetClass
, attr
, descr
) # BIND descr TO targetClass.attr
682 try: # BIND TO itemSliceClass TOO, IF IT EXISTS...
683 targetClass
= get_bound_subclass(obj
,'itemSliceClass')
684 except AttributeError:
685 pass # NO itemSliceClass, SO SKIP
686 else: # BIND TO itemSliceClass
687 setattr(targetClass
, attr
, descr
)
688 if attr
== 'inverseDB': # ADD SHADOW __invert__ TO ACCESS THIS
689 addSpecialMethod(obj
, '__invert__', getInverseDB
)
690 def get_schema_attr(self
, resID
, attr
):
691 'actually retrieve the desired schema attribute'
692 try: # GET SCHEMA FROM CACHE
693 schema
= self
.resourceCache
.schemaCache
[resID
]
694 except KeyError: # HMM, IT SHOULD BE CACHED!
695 schema
= self
.getschema(resID
) # OBTAIN FROM RESOURCE DB
696 self
.resourceCache
.schemaCache
[resID
] = schema
# KEEP IT IN OUR CACHE
698 schema
= schema
[attr
] # GET SCHEMA FOR THIS SPECIFIC ATTRIBUTE
700 raise AttributeError('no worldbase schema info for %s.%s' \
702 targetID
= schema
['targetID'] # GET THE RESOURCE ID
703 return self(targetID
) # actually load the resource
704 def add_root_name(self
, name
):
705 'add name to the root of our data namespace and schema namespace'
706 getattr(self
.Data
, name
) # forces root object to add name if not present
707 getattr(self
.Schema
, name
) # forces root object to add name if not present
708 def save_root_names(self
, rootNames
):
709 'add set of names to our namespace root'
710 for name
in rootNames
:
711 self
.add_root_name(name
)
712 def clear_cache(self
):
713 'clear all resources from cache'
714 self
.resourceCache
.clear()
715 def get_writer(self
):
716 'return writeable mdb if available, or raise exception'
719 except AttributeError:
720 raise WorldbaseReadOnlyError('this metabase is read-only!')
721 def add_resource(self
, resID
, obj
=None):
722 """assign obj as the specified resource ID to our metabase.
723 if obj is None, treat resID as a dictionary whose keys are
724 resource IDs and values are the objects to save."""
726 self
.get_writer().saver
.add_resource_dict(resID
)
728 self
.get_writer().saver
.add_resource(resID
, obj
)
729 def delete_resource(self
, resID
):
730 'delete specified resource ID from our metabase'
731 self
.get_writer().saver
.delete_resource(resID
)
733 'save any pending resource assignments and schemas'
734 self
.get_writer().saver
.save_pending()
736 'discard any pending resource assignments and schemas'
737 self
.get_writer().saver
.rollback()
738 def queue_schema_obj(self
, schemaPath
, attr
, schemaObj
):
739 'add a schema to the list of pending schemas to commit'
740 self
.get_writer().saver
.queue_schema_obj(schemaPath
, attr
, schemaObj
)
741 def add_schema(self
, resID
, schemaObj
):
742 'assign a schema relation object to a worldbase resource name'
744 schemaPath
= SchemaPath(self
, '.'.join(l
[:-1]))
745 setattr(schemaPath
, l
[-1], schemaObj
)
746 def list_pending(self
):
747 return self
.get_writer().saver
.list_pending()
751 class Metabase(MetabaseBase
):
752 def __init__(self
, dbpath
, resourceCache
, zoneDict
=None, parent
=None, **kwargs
):
753 '''zoneDict provides a mechanism for the caller to request information
754 about what type of metabase this dbpath mapped to. zoneDict must
757 self
.Schema
= SchemaPath(self
)
758 self
.Data
= ResourceRoot(self
) # root of namespace
759 self
.resourceCache
= resourceCache
760 self
.debug
= True # single mdb should expose all errors
761 self
.download
= False
762 if zoneDict
is None: # user doesn't want zoneDict info
763 zoneDict
= {} # use a dummy dict, disposable
764 if dbpath
.startswith('http://'):
765 storage
= XMLRPCMetabase(dbpath
, self
, **kwargs
)
766 if 'remote' not in zoneDict
:
767 zoneDict
['remote'] = self
768 elif dbpath
.startswith('mysql:'):
769 storage
= MySQLMetabase(dbpath
[6:], self
, **kwargs
)
770 if 'MySQL' not in zoneDict
:
771 zoneDict
['MySQL'] = self
772 else: # TREAT AS LOCAL FILEPATH
773 dbpath
= os
.path
.expanduser(dbpath
)
774 storage
= ShelveMetabase(dbpath
, self
, **kwargs
)
775 if dbpath
== os
.path
.expanduser('~') \
776 or dbpath
.startswith(os
.path
.expanduser('~')+os
.sep
):
777 if 'my' not in zoneDict
:
778 zoneDict
['my'] = self
779 elif os
.path
.isabs(dbpath
):
780 if 'system' not in zoneDict
:
781 zoneDict
['system'] = self
782 elif dbpath
.split(os
.sep
)[0]==os
.curdir
:
783 if 'here' not in zoneDict
:
784 zoneDict
['here'] = self
785 elif 'subdir' not in zoneDict
:
786 zoneDict
['subdir'] = self
787 self
.storage
= storage
788 if storage
.zoneName
is not None and storage
.zoneName
not in zoneDict
:
789 zoneDict
[storage
.zoneName
] = self
# record this zone name
790 if storage
.writeable
:
791 self
.writeable
= True
792 self
.saver
= ResourceSaver(self
)
793 self
.writer
= self
# record downloaded resources here
795 self
.writeable
= False
796 def update(self
, worldbasePath
=None, debug
=None, keepCurrentPath
=False):
797 if not keepCurrentPath
: # metabase has fixed path
798 raise ValueError('You cannot change the path of a Metabase')
799 def find_resource(self
, resID
, download
=False):
800 yield self
.storage
.find_resource(resID
, download
)
801 def get_pending_or_find(self
, resID
, **kwargs
):
802 'find resID even if only pending (not actually saved yet)'
803 try: # 1st LOOK IN PENDING QUEUE
804 return self
.saver
.pendingData
[resID
]
807 return self(resID
,**kwargs
)
808 def getschema(self
, resID
):
809 'return dict of {attr:{args}} or KeyError if not found'
810 return self
.storage
.getschema(resID
)
811 def save_root_names(self
, rootNames
):
812 if self
.parent
is not None: # add names to parent's namespace as well
813 self
.parent
.save_root_names(rootNames
)
814 MetabaseBase
.save_root_names(self
, rootNames
) # call the generic method
def saveSchema(self, resID, attr, args):
    'save an attribute binding rule to the schema; DO NOT use this internal interface unless you know what you are doing!'
    storage = self.storage
    storage.setschema(resID, attr, args) # write the rule for resID.attr
def saveSchemaEdge(self, schema):
    'record a schema rule as an edge sourceDB -> targetDB in the schema graph'
    src, tgt = schema.sourceDB, schema.targetDB
    self.saveSchema(schema.name, '-schemaEdge', schema) # persist rule itself
    self.storage.graph += src # make sure the source node is present
    self.storage.graph[src][tgt] = schema.name # edge labeled by the rule ID
823 def dir(self
, pattern
='', matchType
='p', asDict
=False, download
=False):
824 return self
.storage
.dir(pattern
, matchType
, asDict
=asDict
,
828 class ZoneDict(UserDict
.DictMixin
):
829 'interface to current zones'
830 def __init__(self
, mdbList
):
831 self
.mdbList
= mdbList
832 def __getitem__(self
, zoneName
):
833 self
.mdbList
.update(keepCurrentPath
=True) # make sure metabases loaded
834 return self
.mdbList
.zoneDict
[zoneName
]
836 self
.mdbList
.update(keepCurrentPath
=True) # make sure metabases loaded
837 return self
.mdbList
.zoneDict
.keys()
839 self
.mdbList
.update(keepCurrentPath
=True) # make sure metabases loaded
840 return self
.mdbList
.zoneDict
.copy()
842 class MetabaseList(MetabaseBase
):
843 '''Primary interface for worldbase resource database access. A single instance
844 of this class is created upon import of the worldbase module, accessible as
845 worldbase.getResource. Users normally will have no need to create additional
846 instances of this class themselves.'''
847 # DEFAULT WORLDBASEPATH: HOME, CURRENT DIR, XMLRPC IN THAT ORDER
848 defaultPath
= ['~','.','http://biodb2.bioinformatics.ucla.edu:5000']
849 def __init__(self
, worldbasePath
=None, resourceCache
=None, separator
=',', mdbArgs
={}):
850 '''initializes attrs; does not connect to metabases'''
851 if resourceCache
is None: # create a cache for loaded resources
852 resourceCache
= ResourceCache()
853 self
.resourceCache
= resourceCache
855 self
.mdbArgs
= mdbArgs
857 self
.zones
= ZoneDict(self
) # interface to dict of zones
858 self
.worldbasePath
= worldbasePath
859 self
.separator
= separator
860 self
.Schema
= SchemaPath(self
)
861 self
.Data
= ResourceRoot(self
, zones
=self
.zones
) # root of namespace
862 self
.debug
= False # if one load attempt fails, try other metabases
863 self
.download
= False
def get_writer(self):
    'load metabases if needed, then delegate writer lookup to the base class'
    # connection to metabases is lazy; force a load before asking for a writer
    self.update(keepCurrentPath=True)
    return MetabaseBase.get_writer(self)
869 def find_resource(self
, resID
, download
=False):
870 'search our metabases for pickle string and docstr for resID'
873 yield mdb
.find_resource(resID
, download
).next()
874 except KeyError: # not in this db
876 raise WorldbaseNotFoundError('unable to find %s in WORLDBASEPATH' % resID
)
877 def get_worldbase_path(self
):
878 'get environment var, or default in that order'
880 return os
.environ
['WORLDBASEPATH']
883 return os
.environ
['PYGRDATAPATH']
885 return self
.separator
.join(self
.defaultPath
)
886 def update(self
, worldbasePath
=None, debug
=None, keepCurrentPath
=False,
888 'get the latest list of resource databases'
889 if keepCurrentPath
: # only update if self.worldbasePath is None
890 worldbasePath
= self
.worldbasePath
891 if worldbasePath
is None: # get environment var or default
892 worldbasePath
= self
.get_worldbase_path()
896 mdbArgs
= self
.mdbArgs
897 if not self
.ready
or self
.worldbasePath
!= worldbasePath
: # reload
898 self
.worldbasePath
= worldbasePath
899 try: # disconnect from previous writeable interface if any
901 except AttributeError:
904 try: # default: we don't have a writeable mdb to save data in
906 except AttributeError:
909 for dbpath
in worldbasePath
.split(self
.separator
):
910 try: # connect to metabase
911 mdb
= Metabase(dbpath
, self
.resourceCache
, self
.zoneDict
, self
,
913 except (KeyboardInterrupt,SystemExit):
914 raise # DON'T TRAP THESE CONDITIONS
915 # FORCED TO ADOPT THIS STRUCTURE BECAUSE xmlrpc RAISES
916 # socket.gaierror WHICH IS NOT A SUBCLASS OF StandardError...
917 # SO I CAN'T JUST TRAP StandardError, UNFORTUNATELY...
918 except: # trap errors and continue to next metabase
920 raise # expose the error immediately
921 else: # warn the user but keep going...
923 traceback
.print_exc(10,sys
.stderr
) # JUST PRINT TRACEBACK
924 print >>sys
.stderr
,'''
925 WARNING: error accessing metabase %s. Continuing...''' % dbpath
926 else: # NO PROBLEM, SO ADD TO OUR RESOURCE DB LIST
927 self
.mdb
.append(mdb
) # SAVE TO OUR LIST OF RESOURCE DATABASES
928 if mdb
.writeable
and not hasattr(self
, 'writer'):
929 self
.writer
= mdb
# record as place to save resources
930 self
.ready
= True # metabases successfully loaded
931 def get_pending_or_find(self
, resID
, **kwargs
):
932 'find resID even if only pending (not actually saved yet)'
934 try: # 1st LOOK IN PENDING QUEUE
935 return mdb
.saver
.pendingData
[resID
]
938 return self(resID
, **kwargs
)
939 def registerServer(self
,locationKey
,serviceDict
):
940 'register the serviceDict with the first index server in WORLDBASEPATH'
942 if hasattr(mdb
.storage
, 'registerServer'):
943 n
= mdb
.storage
.registerServer(locationKey
, serviceDict
)
944 if n
== len(serviceDict
):
946 raise ValueError('unable to register services. Check WORLDBASEPATH')
947 def getschema(self
, resID
):
948 'search our resource databases for schema info for the desired ID'
951 return mdb
.getschema(resID
) # TRY TO OBTAIN FROM THIS DATABASE
953 pass # NOT IN THIS DB
954 raise KeyError('no schema info available for ' + resID
)
955 def dir(self
, pattern
='', matchType
='p', asDict
=False, download
=False):
956 'get list or dict of resources beginning with the specified string'
957 self
.update(keepCurrentPath
=True) # make sure metabases loaded
960 results
.append(mdb
.dir(pattern
, matchType
, asDict
=asDict
,
962 if asDict
: # merge result dictionaries
964 results
.reverse() # give first results highest precedence
965 for subdir
in results
:
968 else: # simply remove redundancy from results
971 filter(d
.setdefault
, l
) # add all entries to dict
976 class ResourceCache(dict):
977 'provide one central repository of loaded resources & schema info'
980 self
.schemaCache
= {}
982 dict.clear(self
) # clear our dictionary
983 self
.schemaCache
.clear() #
985 class ResourceSaver(object):
986 'queues new resources until committed to our mdb'
987 def __init__(self
, mdb
):
990 def clear_pending(self
):
991 self
.pendingData
= {} # CLEAR THE PENDING QUEUE
992 self
.pendingSchema
= {} # CLEAR THE PENDING QUEUE
995 self
.rollbackData
= {} # CLEAR THE ROLLBACK CACHE
def check_docstring(self, obj):
    'enforce requirement for docstring, by raising exception if not present'
    try:
        docstr = obj.__doc__
        # a docstring merely inherited from the class does not count
        if docstr is None or (hasattr(obj.__class__, '__doc__')
                              and docstr == obj.__class__.__doc__):
            raise AttributeError
    except AttributeError:
        raise ValueError('to save a resource object, you MUST give it a __doc__ string attribute describing it!')
1004 def add_resource(self
, resID
, obj
):
1005 'queue the object for saving to our metabase as <resID>'
1006 self
.check_docstring(obj
)
1007 obj
._persistent
_id
= resID
# MARK OBJECT WITH ITS PERSISTENT ID
1008 self
.pendingData
[resID
] = obj
# ADD TO QUEUE
1010 self
.rollbackData
[resID
] = self
.mdb
.resourceCache
[resID
]
1013 self
.cache_if_appropriate(resID
, obj
)
def cache_if_appropriate(self, resID, obj):
    'cache obj under resID unless it is flagged as not ready for use'
    try:
        notReady = obj._worldbase_no_cache
    except AttributeError: # no flag present, so caching is fine
        notReady = False
    if notReady:
        return # do not cache this object; it is not ready to use!!
    self.mdb.resourceCache[resID] = obj # SAVE TO OUR CACHE
def add_resource_dict(self, d):
    'queue every name:object pair in d for saving to the metabase'
    add = self.add_resource
    for resID, obj in d.items():
        add(resID, obj)
def queue_schema_obj(self, schemaPath, attr, schemaObj):
    'queue a schema object for later saving to our metabase'
    resID = schemaPath.getPath(attr) # string ID of the bound attribute
    self.pendingSchema[resID] = (schemaPath, attr, schemaObj)
def save_resource(self, resID, obj):
    'save the object as <id>'
    self.check_docstring(obj) # resources must carry their own docstring
    if obj._persistent_id != resID: # verify the ID marked at queue time
        raise WorldbaseMismatchError('''The _persistent_id attribute for %s has changed!
If you changed it, shame on you! Otherwise, this should not happen,
so report the reproducible steps to this error message as a bug report.''' % resID)
    self.mdb.storage[resID] = obj # finally, write the object to the database
    self.cache_if_appropriate(resID, obj) # and mirror it into our cache
def has_pending(self):
    'return True if there are resources pending to be committed'
    return bool(self.pendingData) or bool(self.pendingSchema)
def save_pending(self):
    'save any pending worldbase resources and schema'
    if len(self.pendingData) > 0 or len(self.pendingSchema) > 0:
        dataQueue = self.pendingData
        schemaQueue = self.pendingSchema
    else:
        raise WorldbaseEmptyError('there is no data queued for saving!')
    for resID, obj in dataQueue.items(): # now save the data
        self.save_resource(resID, obj)
    for schemaPath, attr, schemaObj in schemaQueue.values(): # save schema
        schemaObj.saveSchema(schemaPath, attr, self.mdb) # save each rule
    self.clear_pending() # FINALLY, CLEAN UP...
    self.lastData = dataQueue # keep as a historical record
    self.lastSchema = schemaQueue
def list_pending(self):
    'return tuple of (pending data IDs, pending schema IDs)'
    dataIDs = list(self.pendingData)
    schemaIDs = list(self.pendingSchema)
    return dataIDs, schemaIDs
1059 'dump any pending data without saving, and restore state of cache'
1060 if len(self
.pendingData
)==0 and len(self
.pendingSchema
)==0:
1061 raise WorldbaseEmptyError('there is no data queued for saving!')
1062 self
.mdb
.resourceCache
.update(self
.rollbackData
) # RESTORE THE ROLLBACK QUEUE
1063 self
.clear_pending()
def delete_resource(self, resID): # incorporate this into commit-process?
    'delete the specified resource from resourceCache, saver and schema'
    del self.mdb.storage[resID] # must exist in the resource database
    for mapping in (self.mdb.resourceCache, self.pendingData):
        try: # remove from cache / pending queue, if present
            del mapping[resID]
        except KeyError:
            pass
    self.delSchema(resID) # finally drop its schema bindings
def delSchema(self, resID):
    'delete schema bindings TO and FROM this resource ID'
    storage = self.mdb.storage
    try:
        d = storage.getschema(resID) # the existing schema, if any
    except KeyError:
        return # no schema stored for this object so nothing to do...
    # blow away the whole schema cache: more aggressive than strictly
    # needed, but guaranteed correct -- could be refined later
    self.mdb.resourceCache.schemaCache.clear()
    for attr, rule in d.items():
        if attr.startswith('-'): # a schema object: drop its own relations
            rule.delschema(storage)
        storage.delschema(resID, attr) # delete attribute schema rule
1086 self
.save_pending() # SEE WHETHER ANY DATA NEEDS SAVING
1087 print >>sys
.stderr
,'''
1088 WARNING: saving worldbase pending data that you forgot to save...
1089 Remember in the future, you must issue the command worldbase.commit() to save
1090 your pending worldbase resources to your resource database(s), or alternatively
1091 worldbase.rollback() to dump those pending data without saving them.
1092 It is a very bad idea to rely on this automatic attempt to save your
1093 forgotten data, because it is possible that the Python interpreter
1094 may never call this function at exit (for details see the atexit module
1095 docs in the Python Library Reference).'''
1096 except WorldbaseEmptyError
:
1100 class ResourceServer(XMLRPCServerBase
):
1101 'serves resources that can be transmitted on XMLRPC'
1102 def __init__(self
, mdb
, name
, serverClasses
=None, clientHost
=None,
1103 withIndex
=True, excludeClasses
=None, downloadDB
=None,
1104 resourceDict
=None, **kwargs
):
1105 'construct server for the designated classes'
1106 XMLRPCServerBase
.__init
__(self
, name
, **kwargs
)
1108 if resourceDict
is None:
1109 resourceDict
= mdb
.resourceCache
1110 if excludeClasses
is None: # DEFAULT: NO POINT IN SERVING SQL TABLES...
1111 from sqlgraph
import SQLTableBase
,SQLGraphClustered
1112 excludeClasses
= [SQLTableBase
,SQLGraphClustered
]
1113 if serverClasses
is None: # DEFAULT TO ALL CLASSES WE KNOW HOW TO SERVE
1114 from seqdb
import SequenceFileDB
,BlastDB
, \
1115 XMLRPCSequenceDB
,BlastDBXMLRPC
, \
1116 AnnotationDB
, AnnotationClient
, AnnotationServer
1117 serverClasses
=[(SequenceFileDB
,XMLRPCSequenceDB
,BlastDBXMLRPC
),
1118 (BlastDB
,XMLRPCSequenceDB
,BlastDBXMLRPC
),
1119 (AnnotationDB
,AnnotationClient
,AnnotationServer
)]
1121 from cnestedlist
import NLMSA
1122 from xnestedlist
import NLMSAClient
,NLMSAServer
1123 serverClasses
.append((NLMSA
,NLMSAClient
,NLMSAServer
))
1124 except ImportError: # cnestedlist NOT INSTALLED, SO SKIP...
1126 if clientHost
is None: # DEFAULT: USE THE SAME HOST STRING AS SERVER
1127 clientHost
= self
.host
1129 for id,obj
in resourceDict
.items(): # SAVE ALL OBJECTS MATCHING serverClasses
1131 for skipClass
in excludeClasses
: # CHECK LIST OF CLASSES TO EXCLUDE
1132 if isinstance(obj
,skipClass
):
1136 continue # DO NOT INCLUDE THIS OBJECT IN SERVER
1138 for baseKlass
,clientKlass
,serverKlass
in serverClasses
:
1139 if isinstance(obj
,baseKlass
) and not isinstance(obj
,clientKlass
):
1140 skipThis
=False # OK, WE CAN SERVE THIS CLASS
1142 if skipThis
: # HAS NO XMLRPC CLIENT-SERVER CLASS PAIRING
1143 try: # SAVE IT AS ITSELF
1144 self
.client_dict_setitem(clientDict
,id,obj
,badClasses
=nonPortableClasses
)
1145 except WorldbaseNotPortableError
:
1146 pass # HAS NON-PORTABLE LOCAL DEPENDENCIES, SO SKIP IT
1147 continue # GO ON TO THE NEXT DATA RESOURCE
1148 try: # TEST WHETHER obj CAN BE RE-CLASSED TO CLIENT / SERVER
1149 obj
.__class
__=serverKlass
# CONVERT TO SERVER CLASS FOR SERVING
1150 except TypeError: # GRR, EXTENSION CLASS CAN'T BE RE-CLASSED...
1151 state
=obj
.__getstate
__() # READ obj STATE
1152 newobj
=serverKlass
.__new
__(serverKlass
) # ALLOCATE NEW OBJECT
1153 newobj
.__setstate
__(state
) # AND INITIALIZE ITS STATE
1154 obj
=newobj
# THIS IS OUR RE-CLASSED VERSION OF obj
1155 try: # USE OBJECT METHOD TO SAVE HOST INFO, IF ANY...
1156 obj
.saveHostInfo(clientHost
, self
.port
, id)
1157 except AttributeError: # TRY TO SAVE URL AND NAME DIRECTLY ON obj
1158 obj
.url
= 'http://%s:%d' % (clientHost
,self
.port
)
1160 obj
.__class
__ = clientKlass
# CONVERT TO CLIENT CLASS FOR PICKLING
1161 self
.client_dict_setitem(clientDict
,id,obj
)
1162 obj
.__class
__ = serverKlass
# CONVERT TO SERVER CLASS FOR SERVING
1163 self
[id] = obj
# ADD TO XMLRPC SERVER
1164 self
.registrationData
= clientDict
# SAVE DATA FOR SERVER REGISTRATION
1165 if withIndex
: # SERVE OUR OWN INDEX AS A STATIC, READ-ONLY INDEX
1166 myIndex
= MetabaseServer(name
, readOnly
=True, # CREATE EMPTY INDEX
1167 downloadDB
=downloadDB
)
1168 self
['index'] = myIndex
# ADD TO OUR XMLRPC SERVER
1169 self
.register('', '', server
=myIndex
) # ADD OUR RESOURCES TO THE INDEX
def client_dict_setitem(self, clientDict, k, obj, **kwargs):
    'save pickle and schema for obj into clientDict'
    pickleString = dumps(obj, **kwargs) # pickle the client object
    clientDict[k] = (get_info_dict(obj, pickleString), pickleString)
    try: # ship schema info along with the object, when we have some
        clientDict['SCHEMA.' + k] = (dict(schema_version='1.0'),
                                     self.mdb.getschema(k))
    except KeyError:
        pass # NO SCHEMA FOR THIS OBJ, SO NOTHING TO DO
1182 class ResourcePath(object):
1183 'simple way to read resource names as python foo.bar.bob expressions'
def __init__(self, mdb, base=None):
    'record metabase and string path for this resource-path node'
    # write straight into __dict__: our __setattr__ would try to save
    # a resource instead of setting an attribute
    self.__dict__['_path'] = base
    self.__dict__['_mdb'] = mdb
1187 def getPath(self
, name
):
1188 if self
._path
is not None:
1189 return self
._path
+'.'+name
def __getattr__(self, name):
    'extend the resource path by one more attribute'
    attr = self._pathClass(self._mdb, self.getPath(name))
    # cache via __dict__: plain setattr would trigger a resource save
    self.__dict__[name] = attr
    return attr
def __call__(self, *args, **kwargs):
    'construct the requested resource by invoking our metabase'
    mdb = self._mdb
    return mdb(self._path, *args, **kwargs)
def __setattr__(self, name, obj):
    'save obj to the metabase under the extended resource name'
    resID = self.getPath(name)
    self._mdb.add_resource(resID, obj)
def __delattr__(self, name):
    'delete the named resource and drop any cached attribute for it'
    self._mdb.delete_resource(self.getPath(name))
    if name in self.__dict__: # drop our cached ResourcePath node, if any
        del self.__dict__[name]
1210 def __dir__(self
, prefix
=None, start
=None):
1211 """return list of our attributes from worldbase search """
1213 start
= len(self
._path
) + 1 # skip past . separator
1215 l
= self
._mdb
.dir(prefix
)
1218 if name
.startswith(prefix
):
1219 d
[name
[start
:].split('.')[0]] = None
# bind after the class body: __getattr__ builds child nodes by calling
# self._pathClass, so a plain ResourcePath extends into ResourcePath nodes
ResourcePath._pathClass = ResourcePath
1223 class ResourceRoot(ResourcePath
):
1224 'provide proxy to public metabase methods'
def __init__(self, mdb, base=None, zones=None):
    'expose mdb public methods (and schema / zones) as root attributes'
    ResourcePath.__init__(self, mdb, base)
    d = self.__dict__ # write via __dict__ to evade our __setattr__
    d['schema'] = mdb.Schema
    if zones is not None:
        d['zones'] = zones
    mirrored = ('dir', 'commit', 'rollback', 'add_resource',
                'delete_resource', 'clear_cache', 'add_schema',
                'update', 'list_pending')
    for attr in mirrored: # proxy the metabase's public interface
        d[attr] = getattr(mdb, attr)
def __call__(self, resID, *args, **kwargs):
    """Construct the requested resource"""
    mdb = self._mdb
    return mdb(resID, *args, **kwargs)
1238 return ResourcePath
.__dir
__(self
, '', 0)
1240 class ResourceZone(object):
1241 'provide pygr.Data old-style interface to resource zones'
def __init__(self, mdb, zoneName):
    'bind this zone proxy to its parent metabase list and zone name'
    self._zoneName = zoneName
    self._mdbParent = mdb
1245 def __getattr__(self
, name
):
1246 self
._mdbParent
.update(keepCurrentPath
=True) # make sure metabases loaded
1248 mdb
= self
._mdbParent
.zoneDict
[self
._zoneName
] # get our zone
1250 raise ValueError('no zone "%s" available' % self
._zoneName
)
1251 if name
== 'schema': # get schema root
1252 return SchemaPath
.__getitem
__(self
, mdb
)
1253 else: # treat as regular worldbase string
1254 return ResourcePath
.__getitem
__(self
, mdb
, name
)
1256 class SchemaPath(ResourcePath
):
1257 'save schema information for a resource'
def __setattr__(self, name, schema):
    'queue a schema rule under the extended path name'
    try:
        schema.saveSchema # duck-type check: schema rules offer saveSchema
    except AttributeError:
        raise ValueError('not a valid schema object!')
    self._mdb.queue_schema_obj(self, name, schema) # queue until commit
def __delattr__(self, attr):
    'deleting schema bindings is not supported'
    raise NotImplementedError('schema deletion is not yet implemented.')
# as with ResourcePath: child nodes created by __getattr__ must themselves
# be SchemaPath objects, so schema paths can be extended attribute by attribute
SchemaPath._pathClass = SchemaPath
1269 class DirectRelation(object):
1270 'bind an attribute to the target'
def __init__(self, target):
    'remember the persistent ID of the target this relation points at'
    targetID = getID(target) # accepts object, string ID or ResourcePath
    self.targetID = targetID
def schemaDict(self):
    'return the schema-argument dict describing this relation'
    return {'targetID': self.targetID}
1275 def saveSchema(self
, source
, attr
, mdb
, **kwargs
):
1276 d
= self
.schemaDict()
1277 d
.update(kwargs
) # ADD USER-SUPPLIED ARGS
1278 try: # IF kwargs SUPPLIED A TARGET, SAVE ITS ID
1279 d
['targetID'] = getID(d
['targetDB'])
1283 mdb
.saveSchema(getID(source
), attr
, d
)
class ItemRelation(DirectRelation):
    'bind an attribute of each item of a database to the target'
    def schemaDict(self):
        'same target as DirectRelation, but flagged as an item-level rule'
        args = dict(targetID=self.targetID)
        args['itemRule'] = True
        return args
1290 class ManyToManyRelation(object):
1291 'a general graph mapping from sourceDB -> targetDB with edge info'
1292 _relationCode
='many:many'
1293 def __init__(self
,sourceDB
,targetDB
,edgeDB
=None,bindAttrs
=None,
1294 sourceNotNone
=None,targetNotNone
=None):
1295 self
.sourceDB
=getID(sourceDB
) # CONVERT TO STRING RESOURCE ID
1296 self
.targetDB
=getID(targetDB
)
1297 if edgeDB
is not None:
1298 self
.edgeDB
=getID(edgeDB
)
1301 self
.bindAttrs
=bindAttrs
1302 if sourceNotNone
is not None:
1303 self
.sourceNotNone
= sourceNotNone
1304 if targetNotNone
is not None:
1305 self
.targetNotNone
= targetNotNone
1306 def save_graph_bindings(self
, graphDB
, attr
, mdb
):
1307 'save standard schema bindings to graphDB attributes sourceDB, targetDB, edgeDB'
1308 graphDB
= graphDB
.getPath(attr
) # GET STRING ID FOR source
1310 mdb
.saveSchemaEdge(self
) #SAVE THIS RULE
1311 b
= DirectRelation(self
.sourceDB
) # SAVE sourceDB BINDING
1312 b
.saveSchema(graphDB
, 'sourceDB', mdb
)
1313 b
= DirectRelation(self
.targetDB
) # SAVE targetDB BINDING
1314 b
.saveSchema(graphDB
, 'targetDB', mdb
)
1315 if self
.edgeDB
is not None: # SAVE edgeDB BINDING
1316 b
= DirectRelation(self
.edgeDB
)
1317 b
.saveSchema(graphDB
, 'edgeDB', mdb
)
1319 def saveSchema(self
, path
, attr
, mdb
):
1320 'save schema bindings associated with this rule'
1321 graphDB
= self
.save_graph_bindings(path
, attr
, mdb
)
1322 if self
.bindAttrs
is not None:
1323 bindObj
= (self
.sourceDB
,self
.targetDB
,self
.edgeDB
)
1324 bindArgs
= [{},dict(invert
=True),dict(getEdges
=True)]
1325 try: # USE CUSTOM INVERSE SCHEMA IF PROVIDED BY TARGET DB
1326 bindArgs
[1] = mdb
.get_pending_or_find(graphDB
)._inverse
_schema
()
1327 except AttributeError:
1330 if len(self
.bindAttrs
)>i
and self
.bindAttrs
[i
] is not None:
1331 b
= ItemRelation(graphDB
) # SAVE ITEM BINDING
1332 b
.saveSchema(bindObj
[i
], self
.bindAttrs
[i
],
1334 def delschema(self
,resourceDB
):
1335 'delete resource attribute bindings associated with this rule'
1336 if self
.bindAttrs
is not None:
1337 bindObj
=(self
.sourceDB
,self
.targetDB
,self
.edgeDB
)
1339 if len(self
.bindAttrs
)>i
and self
.bindAttrs
[i
] is not None:
1340 resourceDB
.delschema(bindObj
[i
],self
.bindAttrs
[i
])
class OneToManyRelation(ManyToManyRelation):
    'schema rule for a one-to-many mapping'
    # inherits all binding / deletion machinery; only the recorded
    # relation type code differs
    _relationCode='one:many'
class OneToOneRelation(ManyToManyRelation):
    'schema rule for a one-to-one mapping'
    # inherits all binding / deletion machinery; only the recorded
    # relation type code differs
    _relationCode='one:one'
class ManyToOneRelation(ManyToManyRelation):
    'schema rule for a many-to-one mapping'
    # inherits all binding / deletion machinery; only the recorded
    # relation type code differs
    _relationCode='many:one'
1351 class InverseRelation(DirectRelation
):
1352 "bind source and target as each other's inverse mappings"
1353 _relationCode
= 'inverse'
1354 def saveSchema(self
, source
, attr
, mdb
, **kwargs
):
1355 'save schema bindings associated with this rule'
1356 source
= source
.getPath(attr
) # GET STRING ID FOR source
1358 mdb
.saveSchemaEdge(self
) #SAVE THIS RULE
1359 DirectRelation
.saveSchema(self
, source
, 'inverseDB',
1360 mdb
, **kwargs
) # source -> target
1361 b
= DirectRelation(source
) # CREATE REVERSE MAPPING
1362 b
.saveSchema(self
.targetID
, 'inverseDB',
1363 mdb
, **kwargs
) # target -> source
def delschema(self, resourceDB):
    'remove the reverse (target -> source) inverseDB binding'
    targetID = self.targetID
    resourceDB.delschema(targetID, 'inverseDB')
1368 'get persistent ID of the object or raise AttributeError'
1369 if isinstance(obj
,str): # TREAT ANY STRING AS A RESOURCE ID
1371 elif isinstance(obj
,ResourcePath
):
1372 return obj
._path
# GET RESOURCE ID FROM A ResourcePath
1374 try: # GET RESOURCE'S PERSISTENT ID
1375 return obj
._persistent
_id
1376 except AttributeError:
1377 raise AttributeError('this obj has no persistent ID!')