int32位 posted @ Oct 09, 2014 10:05:34 AM in python , 3215 阅读

在 Python 中访问 a.xx 时,解释器先按常规规则查找属性(即 __getattribute__);只有常规查找失败时才会退而调用 a.__getattr__('xx'),效果等同于 getattr(a, 'xx')。而 a.xx() 则是先取到属性再调用它,相当于 getattr(a, 'xx')()——前提是取到的对象本身可调用(实现了 __call__),并不是名字 xx 实现了 __call__。


class Test:
	"""Dummy backend exposing a few CRUD-like methods for the delegation demo."""
	def __init__(self):
		# NOTE(review): the original line was truncated ("def __init__(self): = 5")
		# and the attribute name was lost in the scrape; 'num' is a
		# reconstruction -- confirm against the original post if it matters.
		self.num = 5
	def get(self):
		"""Pretend to fetch something; prints a marker and returns None."""
		print("getting ...")
	def update(self):
		"""Pretend to update something; prints a marker and returns None."""
		print("updating ...")
	def delete(self):
		"""Pretend to delete something; prints a marker and returns None."""
		print("deleting ...")
class Wapper:
	"""Transparent proxy: any attribute not found here is looked up on *backend*."""

	def __init__(self, backend=None):
		# The wrapped object every unknown attribute is forwarded to.
		self.backend = backend

	def __getattr__(self, name):
		# __getattr__ only fires when normal lookup fails, so reading
		# self.backend here resolves via the instance __dict__ and does
		# not recurse back into this method.
		return getattr(self.backend, name)
if __name__ == "__main__":
	# Demo: attribute access on the proxy falls through to the Test backend.
	wapper = Wapper(backend=Test())


from nova.openstack.common.db import api as db_api
# Map a short backend name to the module path implementing it; DBAPI
# lazily imports the chosen module on first attribute access.
_BACKEND_MAPPING = {'sqlalchemy': 'nova.db.sqlalchemy.api'}
# Module-level singleton through which every DB call below is routed.
IMPL = db_api.DBAPI(backend_mapping=_BACKEND_MAPPING)
def compute_node_get_all(context, no_date_fields=False):
    """Get all computeNodes.

    :param context: The security context
    :param no_date_fields: If set to True, excludes 'created_at', 'updated_at',
                           'deleted_at' and 'deleted' fields from the output,
                           thus significantly reducing its size.
                           Set to False by default

    :returns: List of dictionaries each containing compute node properties,
              including corresponding service and stats
    """
    # The excerpt's docstring was never closed, which swallowed this return
    # statement; also fixed the 'deteled_at' typo above.  Delegates to the
    # active backend through the module-level DBAPI proxy.
    return IMPL.compute_node_get_all(context, no_date_fields)


class DBAPI(object):
    """Lazy-loading proxy to a database backend module.

    Attribute access is forwarded to the backend module (resolved on first
    use), optionally wrapping callables so they execute in eventlet's OS
    thread pool.

    NOTE(review): relies on ``lockutils``, ``CONF`` and ``importutils``
    being imported elsewhere in the real module; they are not defined in
    this excerpt.
    """

    def __init__(self, backend_mapping=None):
        # backend_mapping: optional {backend_name: module_path} map; names
        # absent from the map are imported verbatim by __get_backend().
        if backend_mapping is None:
            backend_mapping = {}
        self.__backend = None
        self.__backend_mapping = backend_mapping

    @lockutils.synchronized('dbapi_backend', 'nova-')
    def __get_backend(self):
        """Get the actual backend.  May be a module or an instance of
        a class.  Doesn't matter to us.  We do this synchronized as it's
        possible multiple greenthreads started very quickly trying to do
        DB calls and eventlet can switch threads before self.__backend gets
        assigned.
        """
        if self.__backend:
            # Another thread assigned it
            return self.__backend
        backend_name = CONF.database.backend
        self.__use_tpool = CONF.database.use_tpool
        if self.__use_tpool:
            from eventlet import tpool
            self.__tpool = tpool
        # Import the untranslated name if we don't have a
        # mapping.  (The excerpt cut this call mid-line; the default
        # second argument is restored from the upstream nova source.)
        backend_path = self.__backend_mapping.get(backend_name,
                                                  backend_name)
        backend_mod = importutils.import_module(backend_path)
        self.__backend = backend_mod.get_backend()
        return self.__backend

    def __getattr__(self, key):
        # Resolve the backend on first use, then forward the lookup.
        backend = self.__backend or self.__get_backend()
        attr = getattr(backend, key)
        if not self.__use_tpool or not hasattr(attr, '__call__'):
            return attr

        def tpool_wrapper(*args, **kwargs):
            # Run blocking DB calls in eventlet's thread pool so they do
            # not stall the cooperative event loop.
            return self.__tpool.execute(attr, *args, **kwargs)

        functools.update_wrapper(tpool_wrapper, attr)
        return tpool_wrapper


def compute_node_get_all(context, no_date_fields):
    """SQLAlchemy implementation: fetch all compute nodes with their
    service and stats joined in manually via low-level Core selects.

    :param context: security context (not consulted directly here)
    :param no_date_fields: when True, drop the timestamp/soft-delete
                           columns from every returned row
    :returns: list of dicts, each a compute node carrying a 'service'
              (dict or None) and 'stats' (list of dicts) key
    """

    # NOTE(msdubov): Using lower-level 'select' queries and joining the tables
    #                manually here allows to gain 3x speed-up and to have 5x
    #                less network load / memory usage compared to the sqla ORM.

    engine = get_engine()

    # Retrieve ComputeNode, Service, Stat.
    compute_node = models.ComputeNode.__table__
    service = models.Service.__table__
    stat = models.ComputeNodeStat.__table__

    with engine.begin() as conn:
        redundant_columns = set(['deleted_at', 'created_at', 'updated_at',
                                 'deleted']) if no_date_fields else set([])

        def filter_columns(table):
            # The excerpt dropped 'c.name' here; compare the column *name*
            # against the redundant set, not the column object.
            return [c for c in table.c if c.name not in redundant_columns]

        # NOTE(review): the three queries below were cut mid-chain in the
        # excerpt; the order_by clauses are reconstructed from the upstream
        # nova source.  Stats MUST be ordered by compute_node_id so the
        # itertools.groupby() join below sees each node's stats contiguously.
        compute_node_query = select(filter_columns(compute_node)).\
                                where(compute_node.c.deleted == 0).\
                                order_by(compute_node.c.service_id)
        compute_node_rows = conn.execute(compute_node_query).fetchall()

        service_query = select(filter_columns(service)).\
                            where((service.c.deleted == 0) &
                                  (service.c.binary == 'nova-compute')).\
                            order_by(service.c.id)
        service_rows = conn.execute(service_query).fetchall()

        stat_query = select(filter_columns(stat)).\
                        where(stat.c.deleted == 0).\
                        order_by(stat.c.compute_node_id)
        stat_rows = conn.execute(stat_query).fetchall()

    # NOTE(msdubov): Transferring sqla.RowProxy objects to dicts.
    stats = [dict(proxy.items()) for proxy in stat_rows]

    # Join ComputeNode & Service manually.
    services = {}
    for proxy in service_rows:
        services[proxy['id']] = dict(proxy.items())

    compute_nodes = []
    for proxy in compute_node_rows:
        node = dict(proxy.items())
        node['service'] = services.get(proxy['service_id'])
        # This append was missing from the excerpt, which would have left
        # compute_nodes permanently empty.
        compute_nodes.append(node)

    # Join ComputeNode & ComputeNodeStat manually.
    # NOTE(msdubov): ComputeNode and ComputeNodeStat map 1-to-Many.
    #                Running time is (asymptotically) optimal due to the use
    #                of iterators (itertools.groupby() for ComputeNodeStat and
    #                iter() for ComputeNode) - we handle each record only once.
    compute_nodes.sort(key=lambda node: node['id'])
    compute_nodes_iter = iter(compute_nodes)
    for nid, nsts in itertools.groupby(stats, lambda s: s['compute_node_id']):
        for node in compute_nodes_iter:
            if node['id'] == nid:
                node['stats'] = list(nsts)
                # 'break'/'else' restored: without them the excerpt
                # immediately clobbered every node's stats with [].
                break
            else:
                node['stats'] = []

    return compute_nodes


  • 无匹配
  • 无匹配
Digital Ali 说:
2021年9月05日 18:27 I wanted to thank you for this excellent read!! I definitely loved every little bit of it. I have you bookmarked your site to check out the new stuff you post.
Gullam Mohiyoddin 说:
2021年9月28日 01:33

Howdy, I discovered your blog per Google bit searching for such really enlightening slanting toward other than your issue sees all around mistaking for me. ไฮโล วิธีเล่น

William 说:
2021年9月30日 02:23

Howdy, I discovered your blog per Google bit searching for such really enlightening slanting toward other than your issue sees all around mistaking for me. สมัคร star vegas

William 说:
2021年10月01日 00:24

This is consistently flooring substance! I have totally respected taking a gander at your obsessions and have shown at the target that you are huge concerning monster levels of them. You are obliterating. สมัคร sagaming

William Johnson 说:
2021年10月02日 18:42

I can propose persistently incessant and perseveringly careful tips, fittingly see it: metamorphosis literary agency

William Johnson 说:
2021年10月03日 00:06

I'm enthusiastically keeping on the web for storys that can oblige me. There is actually a substitute to understand about this. I feel you made enduringly acceptably hardly any salubrious obsessions in Attributes also. Tie included, epic calling! หวยหุ้น

link 说:
2021年10月06日 20:23

This is a truly good site post. Not too many people would actually, the way you just did. I am really impressed that there is so much information about this subject that have been uncovered and you’ve done your best, with so much class. If wanted to know more about green smoke reviews, than by all means come in and check our stuff. สล็อต

link 说:
2021年10月09日 22:20

I think this is an informative post and it is very useful and knowledgeable. therefore, I would like to thank you for the efforts you have made in writing this article. สล็อต999

William Johnson 说:
2021年10月22日 12:03

I truly respect this hair-raising post that you have obliged us. I ensure this would be key for by a wide edge a goliath piece of people. 카지노사이트

William Johnson 说:
2022年1月21日 02:08

All around befuddling subject, in each monster sense, unsafe signs are I was unable to say whether they are just conceivable as frontal cortex blowing as your work out.

William Johnson 说:
2022年2月17日 01:56

A couple of evaluations about it and today I expected to examine it again thinking about how it is especially made. lottery post

William Johnson 说:
2022年6月13日 18:24

Howdy, I discovered your blog per Google bit searching for such really enlightening slanting toward other than your issue sees all around mistaking for me. home goods

登录 *

loading captcha image...
or Ctrl+Enter