nss-pam-ldapd commit: r1615 - nss-pam-ldapd/pynslcd
- From: Commits of the nss-pam-ldapd project <nss-pam-ldapd-commits [at] lists.arthurdejong.org>
- To: nss-pam-ldapd-commits [at] lists.arthurdejong.org
- Reply-to: nss-pam-ldapd-users [at] lists.arthurdejong.org
- Subject: nss-pam-ldapd commit: r1615 - nss-pam-ldapd/pynslcd
- Date: Sun, 29 Jan 2012 16:13:26 +0100 (CET)
Author: arthur
Date: Sun Jan 29 16:13:25 2012
New Revision: 1615
URL: http://arthurdejong.org/viewvc/nss-pam-ldapd?revision=1615&view=revision
Log:
implement a naive offline cache
Added:
nss-pam-ldapd/pynslcd/cache.py
Modified:
nss-pam-ldapd/pynslcd/alias.py
nss-pam-ldapd/pynslcd/common.py
nss-pam-ldapd/pynslcd/ether.py
nss-pam-ldapd/pynslcd/group.py
nss-pam-ldapd/pynslcd/host.py
nss-pam-ldapd/pynslcd/netgroup.py
nss-pam-ldapd/pynslcd/network.py
nss-pam-ldapd/pynslcd/passwd.py
nss-pam-ldapd/pynslcd/protocol.py
nss-pam-ldapd/pynslcd/rpc.py
nss-pam-ldapd/pynslcd/service.py
nss-pam-ldapd/pynslcd/shadow.py
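
A rough, hypothetical sketch of the flow the diffs below implement (only the general idea comes from the commit): each map module gains a Cache class backed by a shared SQLite database, and common.Request.handle_request() mirrors every LDAP result into that cache, falling back to it when the LDAP server is unreachable (ldap.SERVER_DOWN). Here ServerDown, OfflineCache and fetch_from_ldap are placeholder names, not code from the commit; the real implementation is in cache.py and common.py below.

import sqlite3


class ServerDown(Exception):
    """Stand-in for the ldap.SERVER_DOWN exception used in common.py."""


class OfflineCache(object):
    """Tiny placeholder cache keyed on a single column."""

    def __init__(self):
        self.con = sqlite3.connect(':memory:')
        self.con.execute(
            'CREATE TABLE passwd_cache (uid TEXT PRIMARY KEY, uidNumber INTEGER)')

    def store(self, uid, uidnumber):
        # INSERT OR REPLACE mirrors what cache.Cache.store() does below
        with self.con:
            self.con.execute(
                'INSERT OR REPLACE INTO passwd_cache VALUES (?, ?)', (uid, uidnumber))

    def retrieve(self, uid):
        return self.con.execute(
            'SELECT uid, uidNumber FROM passwd_cache WHERE uid = ?', (uid, )).fetchall()


def lookup(uid, cache, fetch_from_ldap):
    """Try LDAP first; on success mirror the results into the cache,
    on SERVER_DOWN serve whatever the cache still holds."""
    try:
        results = list(fetch_from_ldap(uid))
        for values in results:
            cache.store(*values)
        return results
    except ServerDown:
        return cache.retrieve(uid)
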
Modified: nss-pam-ldapd/pynslcd/alias.py
==============================================================================
--- nss-pam-ldapd/pynslcd/alias.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/alias.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -18,6 +18,7 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
+import cache
import common
import constants
@@ -33,6 +34,23 @@
required = ('cn', 'rfc822MailMember')
+class Cache(cache.Cache):
+
+ retrieve_sql = '''
+ SELECT `alias_cache`.`cn` AS `cn`,
+ `alias_1_cache`.`rfc822MailMember` AS `rfc822MailMember`
+ FROM `alias_cache`
+ LEFT JOIN `alias_1_cache`
+ ON `alias_1_cache`.`alias` = `alias_cache`.`cn`
+ '''
+
+ def retrieve(self, parameters):
+ query = cache.Query(self.retrieve_sql, parameters)
+ # return results, returning the members as a list
+ for row in cache.RowGrouper(query.execute(self.con), ('cn', ), ('rfc822MailMember', )):
+ yield row['cn'], row['rfc822MailMember']
+
+
class AliasRequest(common.Request):
def write(self, name, members):
Added: nss-pam-ldapd/pynslcd/cache.py
==============================================================================
--- /dev/null 00:00:00 1970 (empty, because file is newly added)
+++ nss-pam-ldapd/pynslcd/cache.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -0,0 +1,312 @@
+
+# cache.py - caching layer for pynslcd
+#
+# Copyright (C) 2012 Arthur de Jong
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301 USA
+
+import datetime
+import itertools
+import os
+import sys
+
+import sqlite3
+
+
+# TODO: probably create a config table
+
+# FIXME: store the cache in the right place and make it configurable
+filename = '/var/run/nslcd/cache.sqlite'
+dirname = os.path.dirname(filename)
+if not os.path.isdir(dirname):
+ os.mkdir(dirname)
+con = sqlite3.connect(filename,
+ detect_types=sqlite3.PARSE_DECLTYPES, check_same_thread=False)
+con.row_factory = sqlite3.Row
+
+# FIXME: have some way to remove stale entries from the cache if all items
+# from LDAP are queried (perhaps use TTL from all requests)
+
+# set up the database
+con.executescript('''
+
+ -- store temporary tables in memory
+ PRAGMA temp_store = MEMORY;
+
+ -- disable sync() on database (corruption on disk failure)
+ PRAGMA synchronous = OFF;
+
+ -- put journal in memory (corruption if crash during transaction)
+ PRAGMA journal_mode = MEMORY;
+
+ -- tables for alias cache
+ CREATE TABLE IF NOT EXISTS `alias_cache`
+ ( `cn` TEXT PRIMARY KEY COLLATE NOCASE,
+ `mtime` TIMESTAMP NOT NULL );
+ CREATE TABLE IF NOT EXISTS `alias_1_cache`
+ ( `alias` TEXT NOT NULL COLLATE NOCASE,
+ `rfc822MailMember` TEXT NOT NULL,
+ FOREIGN KEY(`alias`) REFERENCES `alias_cache`(`cn`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `alias_1_idx` ON `alias_1_cache`(`alias`);
+
+ -- table for ethernet cache
+ CREATE TABLE IF NOT EXISTS `ether_cache`
+ ( `cn` TEXT NOT NULL COLLATE NOCASE,
+ `macAddress` TEXT NOT NULL COLLATE NOCASE,
+ `mtime` TIMESTAMP NOT NULL,
+ UNIQUE (`cn`, `macAddress`) );
+
+ -- table for group cache
+ CREATE TABLE IF NOT EXISTS `group_cache`
+ ( `cn` TEXT PRIMARY KEY,
+ `userPassword` TEXT,
+ `gidNumber` INTEGER NOT NULL UNIQUE,
+ `mtime` TIMESTAMP NOT NULL );
+ CREATE TABLE IF NOT EXISTS `group_3_cache`
+ ( `group` TEXT NOT NULL,
+ `memberUid` TEXT NOT NULL,
+ FOREIGN KEY(`group`) REFERENCES `group_cache`(`cn`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `group_3_idx` ON `group_3_cache`(`group`);
+
+ -- tables for host cache
+ CREATE TABLE IF NOT EXISTS `host_cache`
+ ( `cn` TEXT PRIMARY KEY COLLATE NOCASE,
+ `mtime` TIMESTAMP NOT NULL );
+ CREATE TABLE IF NOT EXISTS `host_1_cache`
+ ( `host` TEXT NOT NULL COLLATE NOCASE,
+ `cn` TEXT NOT NULL COLLATE NOCASE,
+ FOREIGN KEY(`host`) REFERENCES `host_cache`(`cn`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `host_1_idx` ON `host_1_cache`(`host`);
+ CREATE TABLE IF NOT EXISTS `host_2_cache`
+ ( `host` TEXT NOT NULL COLLATE NOCASE,
+ `ipHostNumber` TEXT NOT NULL,
+ FOREIGN KEY(`host`) REFERENCES `host_cache`(`cn`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `host_2_idx` ON `host_2_cache`(`host`);
+
+ -- FIXME: this does not work as entries are never removed from the cache
+ CREATE TABLE IF NOT EXISTS `netgroup_cache`
+ ( `cn` TEXT NOT NULL,
+ `member` TEXT NOT NULL,
+ `mtime` TIMESTAMP NOT NULL,
+ UNIQUE (`cn`, `member`) );
+
+ -- tables for network cache
+ CREATE TABLE IF NOT EXISTS `network_cache`
+ ( `cn` TEXT PRIMARY KEY COLLATE NOCASE,
+ `mtime` TIMESTAMP NOT NULL );
+ CREATE TABLE IF NOT EXISTS `network_1_cache`
+ ( `network` TEXT NOT NULL COLLATE NOCASE,
+ `cn` TEXT NOT NULL COLLATE NOCASE,
+ FOREIGN KEY(`network`) REFERENCES `network_cache`(`cn`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `network_1_idx` ON `network_1_cache`(`network`);
+ CREATE TABLE IF NOT EXISTS `network_2_cache`
+ ( `network` TEXT NOT NULL,
+ `ipNetworkNumber` TEXT NOT NULL,
+ FOREIGN KEY(`network`) REFERENCES `network_cache`(`cn`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `network_2_idx` ON `network_2_cache`(`network`);
+
+ -- table for passwd cache
+ CREATE TABLE IF NOT EXISTS `passwd_cache`
+ ( `uid` TEXT PRIMARY KEY,
+ `userPassword` TEXT,
+ `uidNumber` INTEGER NOT NULL UNIQUE,
+ `gidNumber` INTEGER NOT NULL,
+ `gecos` TEXT,
+ `homeDirectory` TEXT,
+ `loginShell` TEXT,
+ `mtime` TIMESTAMP NOT NULL );
+
+ -- table for protocol cache
+ CREATE TABLE IF NOT EXISTS `protocol_cache`
+ ( `cn` TEXT PRIMARY KEY,
+ `ipProtocolNumber` INTEGER NOT NULL,
+ `mtime` TIMESTAMP NOT NULL );
+ CREATE TABLE IF NOT EXISTS `protocol_1_cache`
+ ( `protocol` TEXT NOT NULL,
+ `cn` TEXT NOT NULL,
+ FOREIGN KEY(`protocol`) REFERENCES `protocol_cache`(`cn`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `protocol_1_idx` ON `protocol_1_cache`(`protocol`);
+
+ -- table for rpc cache
+ CREATE TABLE IF NOT EXISTS `rpc_cache`
+ ( `cn` TEXT PRIMARY KEY,
+ `oncRpcNumber` INTEGER NOT NULL,
+ `mtime` TIMESTAMP NOT NULL );
+ CREATE TABLE IF NOT EXISTS `rpc_1_cache`
+ ( `rpc` TEXT NOT NULL,
+ `cn` TEXT NOT NULL,
+ FOREIGN KEY(`rpc`) REFERENCES `rpc_cache`(`cn`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `rpc_1_idx` ON `rpc_1_cache`(`rpc`);
+
+ -- tables for service cache
+ CREATE TABLE IF NOT EXISTS `service_cache`
+ ( `cn` TEXT NOT NULL,
+ `ipServicePort` INTEGER NOT NULL,
+ `ipServiceProtocol` TEXT NOT NULL,
+ `mtime` TIMESTAMP NOT NULL,
+ UNIQUE (`ipServicePort`, `ipServiceProtocol`) );
+ CREATE TABLE IF NOT EXISTS `service_1_cache`
+ ( `ipServicePort` INTEGER NOT NULL,
+ `ipServiceProtocol` TEXT NOT NULL,
+ `cn` TEXT NOT NULL,
+ FOREIGN KEY(`ipServicePort`) REFERENCES `service_cache`(`ipServicePort`)
+ ON DELETE CASCADE ON UPDATE CASCADE,
+ FOREIGN KEY(`ipServiceProtocol`) REFERENCES `service_cache`(`ipServiceProtocol`)
+ ON DELETE CASCADE ON UPDATE CASCADE );
+ CREATE INDEX IF NOT EXISTS `service_1_idx1` ON `service_1_cache`(`ipServicePort`);
+ CREATE INDEX IF NOT EXISTS `service_1_idx2` ON `service_1_cache`(`ipServiceProtocol`);
+
+ -- table for shadow cache
+ CREATE TABLE IF NOT EXISTS `shadow_cache`
+ ( `uid` TEXT PRIMARY KEY,
+ `userPassword` TEXT,
+ `shadowLastChange` INTEGER,
+ `shadowMin` INTEGER,
+ `shadowMax` INTEGER,
+ `shadowWarning` INTEGER,
+ `shadowInactive` INTEGER,
+ `shadowExpire` INTEGER,
+ `shadowFlag` INTEGER,
+ `mtime` TIMESTAMP NOT NULL );
+
+ ''')
+
+
+class Query(object):
+
+ def __init__(self, query, parameters=None):
+ self.query = query
+ self.wheres = []
+ self.parameters = []
+ if parameters:
+ for k, v in parameters.items():
+ self.add_where('`%s` = ?' % k, [v])
+
+ def add_query(self, query):
+ self.query += ' ' + query
+
+ def add_where(self, where, parameters):
+ self.wheres.append(where)
+ self.parameters += parameters
+
+ def execute(self, con):
+ query = self.query
+ if self.wheres:
+ query += ' WHERE ' + ' AND '.join(self.wheres)
+ c = con.cursor()
+ return c.execute(query, self.parameters)
+
+
+class CnAliasedQuery(Query):
+
+ sql = '''
+ SELECT `%(table)s_cache`.*,
+ `%(table)s_1_cache`.`cn` AS `alias`
+ FROM `%(table)s_cache`
+ LEFT JOIN `%(table)s_1_cache`
+ ON `%(table)s_1_cache`.`%(table)s` = `%(table)s_cache`.`cn`
+ '''
+
+ cn_join = '''
+ LEFT JOIN `%(table)s_1_cache` `cn_alias`
+ ON `cn_alias`.`%(table)s` = `%(table)s_cache`.`cn`
+ '''
+
+ def __init__(self, table, parameters):
+ args = dict(table=table)
+ super(CnAliasedQuery, self).__init__(self.sql % args)
+ for k, v in parameters.items():
+ if k == 'cn':
+ self.add_query(self.cn_join % args)
+ self.add_where('(`%(table)s_cache`.`cn` = ? OR `cn_alias`.`cn` = ?)' % args, [v, v])
+ else:
+ self.add_where('`%s` = ?' % k, [v])
+
+
+class RowGrouper(object):
+ """Pass in query results and group the results by a certain specified
+ list of columns."""
+
+ def __init__(self, results, groupby, columns):
+ self.groupby = groupby
+ self.columns = columns
+ self.results = itertools.groupby(results, key=self.keyfunc)
+
+ def __iter__(self):
+ return self
+
+ def keyfunc(self, row):
+ return tuple(row[x] for x in self.groupby)
+
+ def next(self):
+ groupcols, rows = self.results.next()
+ tmp = dict((x, list()) for x in self.columns)
+ for row in rows:
+ for col in self.columns:
+ if row[col] is not None:
+ tmp[col].append(row[col])
+ result = dict(row)
+ result.update(tmp)
+ return result
+
+
+class Cache(object):
+
+ def __init__(self):
+ self.con = con
+ self.table = sys.modules[self.__module__].__name__
+
+ def store(self, *values):
+ """Store the values in the cache for the specified table."""
+ simple_values = []
+ multi_values = {}
+ for n, v in enumerate(values):
+ if isinstance(v, (list, tuple, set)):
+ multi_values[n] = v
+ else:
+ simple_values.append(v)
+ simple_values.append(datetime.datetime.now())
+ args = ', '.join(len(simple_values) * ('?', ))
+ con.execute('''
+ INSERT OR REPLACE INTO %s_cache
+ VALUES
+ (%s)
+ ''' % (self.table, args), simple_values)
+ for n, vlist in multi_values.items():
+ con.execute('''
+ DELETE FROM %s_%d_cache
+ WHERE `%s` = ?
+ ''' % (self.table, n, self.table), (values[0], ))
+ con.executemany('''
+ INSERT INTO %s_%d_cache
+ VALUES
+ (?, ?)
+ ''' % (self.table, n), ((values[0], x) for x in vlist))
+
+ def retrieve(self, parameters):
+ """Retrieve all items from the cache based on the parameters
supplied."""
+ query = Query('''
+ SELECT *
+ FROM %s_cache
+ ''' % self.table, parameters)
+ return (list(x)[:-1] for x in query.execute(self.con))
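
As a hypothetical usage example (not part of the commit, and assuming the pynslcd modules are importable and the cache database is writable), the generic Cache above takes its table name from the defining module, so a bare subclass such as the one ether.py gains below could be exercised like this; the values are made up:

import ether

c = ether.Cache()                      # self.table becomes 'ether', taken from the module name
c.store('router', '0:18:8a:54:1a:8e')  # scalar values go into ether_cache together with an mtime
for cn, mac in c.retrieve({'cn': 'router'}):
    print cn, mac                      # rows come back without the trailing mtime column
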
Modified: nss-pam-ldapd/pynslcd/common.py
==============================================================================
--- nss-pam-ldapd/pynslcd/common.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/common.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -25,6 +25,7 @@
import ldap.dn
from attmap import Attributes
+import cache
import cfg
import constants
@@ -187,6 +188,10 @@
self.calleruid = calleruid
module = sys.modules[self.__module__]
self.search = getattr(module, 'Search', None)
+ if not hasattr(module, 'cache_obj'):
+ cache_cls = getattr(module, 'Cache', None)
+ module.cache_obj = cache_cls() if cache_cls else None
+ self.cache = module.cache_obj
def read_parameters(self, fp):
"""This method should read the parameters from ths stream and
@@ -196,10 +201,23 @@
def handle_request(self, parameters):
"""This method handles the request based on the parameters read
with read_parameters()."""
- for dn, attributes in self.search(conn=self.conn, parameters=parameters):
- for values in self.convert(dn, attributes, parameters):
- self.fp.write_int32(constants.NSLCD_RESULT_BEGIN)
- self.write(*values)
+ try:
+ with cache.con:
+ for dn, attributes in self.search(conn=self.conn, parameters=parameters):
+ for values in self.convert(dn, attributes, parameters):
+ self.fp.write_int32(constants.NSLCD_RESULT_BEGIN)
+ self.write(*values)
+ if self.cache:
+ self.cache.store(*values)
+ except ldap.SERVER_DOWN:
+ if self.cache:
+ logging.debug('read from cache')
+ # we assume server went down before writing any entries
+ for values in self.cache.retrieve(parameters):
+ self.fp.write_int32(constants.NSLCD_RESULT_BEGIN)
+ self.write(*values)
+ else:
+ raise
# write the final result code
self.fp.write_int32(constants.NSLCD_RESULT_END)
Modified: nss-pam-ldapd/pynslcd/ether.py
==============================================================================
--- nss-pam-ldapd/pynslcd/ether.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/ether.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -20,6 +20,7 @@
import struct
+import cache
import common
import constants
@@ -46,6 +47,10 @@
required = ('cn', 'macAddress')
+class Cache(cache.Cache):
+ pass
+
+
class EtherRequest(common.Request):
def write(self, name, ether):
Modified: nss-pam-ldapd/pynslcd/group.py
==============================================================================
--- nss-pam-ldapd/pynslcd/group.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/group.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -18,9 +18,11 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
+import itertools
import logging
from passwd import dn2uid, uid2dn
+import cache
import common
import constants
@@ -64,6 +66,24 @@
return super(Search, self).mk_filter()
+class Cache(cache.Cache):
+
+ retrieve_sql = '''
+ SELECT `cn`, `userPassword`, `gidNumber`, `memberUid`
+ FROM `group_cache`
+ LEFT JOIN `group_3_cache`
+ ON `group_3_cache`.`group` = `group_cache`.`cn`
+ '''
+
+ def retrieve(self, parameters):
+ query = cache.Query(self.retrieve_sql, parameters)
+ # return results returning the members as a set
+ q = itertools.groupby(query.execute(self.con),
+ key=lambda x: (x['cn'], x['userPassword'], x['gidNumber']))
+ for k, v in q:
+ yield k + (set(x['memberUid'] for x in v if x['memberUid'] is not None), )
+
+
class GroupRequest(common.Request):
def write(self, name, passwd, gid, members):
Modified: nss-pam-ldapd/pynslcd/host.py
==============================================================================
--- nss-pam-ldapd/pynslcd/host.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/host.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -18,6 +18,7 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
+import cache
import common
import constants
@@ -32,6 +33,31 @@
required = ('cn', )
+class HostQuery(cache.CnAliasedQuery):
+
+ sql = '''
+ SELECT `host_cache`.`cn` AS `cn`,
+ `host_1_cache`.`cn` AS `alias`,
+ `host_2_cache`.`ipHostNumber` AS `ipHostNumber`
+ FROM `host_cache`
+ LEFT JOIN `host_1_cache`
+ ON `host_1_cache`.`host` = `host_cache`.`cn`
+ LEFT JOIN `host_2_cache`
+ ON `host_2_cache`.`host` = `host_cache`.`cn`
+ '''
+
+ def __init__(self, parameters):
+ super(HostQuery, self).__init__('host', parameters)
+
+
+class Cache(cache.Cache):
+
+ def retrieve(self, parameters):
+ query = HostQuery(parameters)
+ for row in cache.RowGrouper(query.execute(self.con), ('cn', ), ('alias', 'ipHostNumber', )):
+ yield row['cn'], row['alias'], row['ipHostNumber']
+
+
class HostRequest(common.Request):
def write(self, hostname, aliases, addresses):
Modified: nss-pam-ldapd/pynslcd/netgroup.py
==============================================================================
--- nss-pam-ldapd/pynslcd/netgroup.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/netgroup.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -21,6 +21,7 @@
import logging
import re
+import cache
import common
import constants
@@ -40,6 +41,10 @@
required = ('cn', )
+class Cache(cache.Cache):
+ pass
+
+
class NetgroupRequest(common.Request):
def write(self, name, member):
Modified: nss-pam-ldapd/pynslcd/network.py
==============================================================================
--- nss-pam-ldapd/pynslcd/network.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/network.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -18,6 +18,7 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
+import cache
import common
import constants
@@ -33,6 +34,31 @@
required = ('cn', )
+class NetworkQuery(cache.CnAliasedQuery):
+
+ sql = '''
+ SELECT `network_cache`.`cn` AS `cn`,
+ `network_1_cache`.`cn` AS `alias`,
+ `network_2_cache`.`ipNetworkNumber` AS `ipNetworkNumber`
+ FROM `network_cache`
+ LEFT JOIN `network_1_cache`
+ ON `network_1_cache`.`network` = `network_cache`.`cn`
+ LEFT JOIN `network_2_cache`
+ ON `network_2_cache`.`network` = `network_cache`.`cn`
+ '''
+
+ def __init__(self, parameters):
+ super(NetworkQuery, self).__init__('network', parameters)
+
+
+class Cache(cache.Cache):
+
+ def retrieve(self, parameters):
+ query = NetworkQuery(parameters)
+ for row in cache.RowGrouper(query.execute(self.con), ('cn', ), ('alias', 'ipNetworkNumber', )):
+ yield row['cn'], row['alias'], row['ipNetworkNumber']
+
+
class NetworkRequest(common.Request):
def write(self, networkname, aliases, addresses):
Modified: nss-pam-ldapd/pynslcd/passwd.py
==============================================================================
--- nss-pam-ldapd/pynslcd/passwd.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/passwd.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -20,6 +20,7 @@
import logging
+import cache
import common
import constants
@@ -43,6 +44,10 @@
'loginShell')
+class Cache(cache.Cache):
+ pass
+
+
class PasswdRequest(common.Request):
def write(self, name, passwd, uid, gid, gecos, home, shell):
Modified: nss-pam-ldapd/pynslcd/protocol.py
==============================================================================
--- nss-pam-ldapd/pynslcd/protocol.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/protocol.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -18,6 +18,7 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
+import cache
import common
import constants
@@ -33,6 +34,14 @@
required = ('cn', 'ipProtocolNumber')
+class Cache(cache.Cache):
+
+ def retrieve(self, parameters):
+ query = cache.CnAliasedQuery('protocol', parameters)
+ for row in cache.RowGrouper(query.execute(self.con), ('cn', ), ('alias', )):
+ yield row['cn'], row['alias'], row['ipProtocolNumber']
+
+
class ProtocolRequest(common.Request):
def write(self, name, names, number):
Modified: nss-pam-ldapd/pynslcd/rpc.py
==============================================================================
--- nss-pam-ldapd/pynslcd/rpc.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/rpc.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -18,6 +18,7 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
+import cache
import common
import constants
@@ -33,6 +34,14 @@
required = ('cn', 'oncRpcNumber')
+class Cache(cache.Cache):
+
+ def retrieve(self, parameters):
+ query = cache.CnAliasedQuery('rpc', parameters)
+ for row in cache.RowGrouper(query.execute(self.con), ('cn', ), ('alias', )):
+ yield row['cn'], row['alias'], row['oncRpcNumber']
+
+
class RpcRequest(common.Request):
def write(self, name, aliases, number):
Modified: nss-pam-ldapd/pynslcd/service.py
==============================================================================
--- nss-pam-ldapd/pynslcd/service.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/service.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -18,9 +18,11 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
+import datetime
import ldap.filter
import logging
+import cache
import common
import constants
@@ -39,6 +41,58 @@
required = ('cn', 'ipServicePort', 'ipServiceProtocol')
+class ServiceQuery(cache.CnAliasedQuery):
+
+ sql = '''
+ SELECT `service_cache`.*,
+ `service_1_cache`.`cn` AS `alias`
+ FROM `service_cache`
+ LEFT JOIN `service_1_cache`
+ ON `service_1_cache`.`ipServicePort` = `service_cache`.`ipServicePort`
+ AND `service_1_cache`.`ipServiceProtocol` = `service_cache`.`ipServiceProtocol`
+ '''
+
+ cn_join = '''
+ LEFT JOIN `service_1_cache` `cn_alias`
+ ON `cn_alias`.`ipServicePort` = `service_cache`.`ipServicePort`
+ AND `cn_alias`.`ipServiceProtocol` = `service_cache`.`ipServiceProtocol`
+ '''
+
+ def __init__(self, parameters):
+ super(ServiceQuery, self).__init__('service', {})
+ for k, v in parameters.items():
+ if k == 'cn':
+ self.add_query(self.cn_join)
+ self.add_where('(`service_cache`.`cn` = ? OR `cn_alias`.`cn` = ?)', [v, v])
+ else:
+ self.add_where('`service_cache`.`%s` = ?' % k, [v])
+
+
+class Cache(cache.Cache):
+
+ def store(self, name, aliases, port, protocol):
+ self.con.execute('''
+ INSERT OR REPLACE INTO `service_cache`
+ VALUES
+ (?, ?, ?, ?)
+ ''', (name, port, protocol, datetime.datetime.now()))
+ self.con.execute('''
+ DELETE FROM `service_1_cache`
+ WHERE `ipServicePort` = ?
+ AND `ipServiceProtocol` = ?
+ ''', (port, protocol))
+ self.con.executemany('''
+ INSERT INTO `service_1_cache`
+ VALUES
+ (?, ?, ?)
+ ''', ((port, protocol, alias) for alias in aliases))
+
+ def retrieve(self, parameters):
+ query = ServiceQuery(parameters)
+ for row in cache.RowGrouper(query.execute(self.con), ('cn', 'ipServicePort', 'ipServiceProtocol'), ('alias', )):
+ yield row['cn'], row['alias'], row['ipServicePort'], row['ipServiceProtocol']
+
+
class ServiceRequest(common.Request):
def write(self, name, aliases, port, protocol):
Modified: nss-pam-ldapd/pynslcd/shadow.py
==============================================================================
--- nss-pam-ldapd/pynslcd/shadow.py Sun Jan 29 15:50:27 2012 (r1614)
+++ nss-pam-ldapd/pynslcd/shadow.py Sun Jan 29 16:13:25 2012 (r1615)
@@ -20,6 +20,7 @@
import logging
+import cache
import common
import constants
@@ -43,6 +44,10 @@
required = ('uid', )
+class Cache(cache.Cache):
+ pass
+
+
class ShadowRequest(common.Request):
def write(self, name, passwd, lastchangedate, mindays, maxdays, warndays,