X-Git-Url: http://git.samba.org/?a=blobdiff_plain;f=python%2Fsamba%2Fdbchecker.py;h=6e84f1903c8b0ee32bcc1b3271f7ca370ec7db91;hb=87bbc2df972d112870bb7d5c8839663affe62b21;hp=f0c2217f68338a964618ebdb2b425fea566958e3;hpb=5361fc68436b4dcd8d1b7174daee27c78d3c7ade;p=samba.git
diff --git a/python/samba/dbchecker.py b/python/samba/dbchecker.py
index f0c2217f683..6e84f1903c8 100644
--- a/python/samba/dbchecker.py
+++ b/python/samba/dbchecker.py
@@ -17,6 +17,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
+from __future__ import print_function
import ldb
import samba
import time
@@ -31,6 +32,8 @@ from samba.common import dsdb_Dn
from samba.dcerpc import security
from samba.descriptor import get_wellknown_sds, get_diff_sds
from samba.auth import system_session, admin_session
+from samba.netcmd import CommandError
+from samba.netcmd.fsmo import get_fsmo_roleowner
class dbcheck(object):
@@ -52,15 +55,20 @@ class dbcheck(object):
self.fix_all_duplicates = False
self.fix_all_DN_GUIDs = False
self.fix_all_binary_dn = False
- self.remove_all_deleted_DN_links = False
+ self.remove_implausible_deleted_DN_links = False
+ self.remove_plausible_deleted_DN_links = False
self.fix_all_string_dn_component_mismatch = False
self.fix_all_GUID_dn_component_mismatch = False
self.fix_all_SID_dn_component_mismatch = False
+ self.fix_all_old_dn_string_component_mismatch = False
self.fix_all_metadata = False
self.fix_time_metadata = False
self.fix_undead_linked_attributes = False
self.fix_all_missing_backlinks = False
self.fix_all_orphaned_backlinks = False
+ self.fix_all_missing_forward_links = False
+ self.duplicate_link_cache = dict()
+ self.recover_all_forward_links = False
self.fix_rmd_flags = False
self.fix_ntsecuritydescriptor = False
self.fix_ntsecuritydescriptor_owner_group = False
@@ -77,6 +85,7 @@ class dbcheck(object):
self.fix_base64_userparameters = False
self.fix_utf8_userparameters = False
self.fix_doubled_userparameters = False
+ self.fix_sid_rid_set_conflict = False
self.reset_well_known_acls = reset_well_known_acls
self.reset_all_well_known_acls = False
self.in_transaction = in_transaction
@@ -89,16 +98,19 @@ class dbcheck(object):
self.wellknown_sds = get_wellknown_sds(self.samdb)
self.fix_all_missing_objectclass = False
self.fix_missing_deleted_objects = False
+ self.fix_replica_locations = False
+ self.fix_missing_rid_set_master = False
self.dn_set = set()
self.link_id_cache = {}
self.name_map = {}
try:
res = samdb.search(base="CN=DnsAdmins,CN=Users,%s" % samdb.domain_dn(), scope=ldb.SCOPE_BASE,
- attrs=["objectSid"])
+ attrs=["objectSid"])
dnsadmins_sid = ndr_unpack(security.dom_sid, res[0]["objectSid"][0])
self.name_map['DnsAdmins'] = str(dnsadmins_sid)
- except ldb.LdbError, (enum, estr):
+ except ldb.LdbError as e5:
+ (enum, estr) = e5.args
if enum != ldb.ERR_NO_SUCH_OBJECT:
raise
pass
@@ -122,6 +134,7 @@ class dbcheck(object):
res = self.samdb.search(base="", scope=ldb.SCOPE_BASE, attrs=['namingContexts'])
self.deleted_objects_containers = []
self.ncs_lacking_deleted_containers = []
+ self.dns_partitions = []
try:
self.ncs = res[0]["namingContexts"]
except KeyError:
@@ -131,11 +144,67 @@ class dbcheck(object):
for nc in self.ncs:
try:
- dn = self.samdb.get_wellknown_dn(ldb.Dn(self.samdb, nc),
+ dn = self.samdb.get_wellknown_dn(ldb.Dn(self.samdb, nc.decode('utf8')),
dsdb.DS_GUID_DELETED_OBJECTS_CONTAINER)
self.deleted_objects_containers.append(dn)
except KeyError:
- self.ncs_lacking_deleted_containers.append(ldb.Dn(self.samdb, nc))
+ self.ncs_lacking_deleted_containers.append(ldb.Dn(self.samdb, nc.decode('utf8')))
+
+ domaindns_zone = 'DC=DomainDnsZones,%s' % self.samdb.get_default_basedn()
+ forestdns_zone = 'DC=ForestDnsZones,%s' % self.samdb.get_root_basedn()
+ domain = self.samdb.search(scope=ldb.SCOPE_ONELEVEL,
+ attrs=["msDS-NC-Replica-Locations", "msDS-NC-RO-Replica-Locations"],
+ base=self.samdb.get_partitions_dn(),
+ expression="(&(objectClass=crossRef)(ncName=%s))" % domaindns_zone)
+ if len(domain) == 1:
+ self.dns_partitions.append((ldb.Dn(self.samdb, forestdns_zone), domain[0]))
+
+ forest = self.samdb.search(scope=ldb.SCOPE_ONELEVEL,
+ attrs=["msDS-NC-Replica-Locations", "msDS-NC-RO-Replica-Locations"],
+ base=self.samdb.get_partitions_dn(),
+ expression="(&(objectClass=crossRef)(ncName=%s))" % forestdns_zone)
+ if len(forest) == 1:
+ self.dns_partitions.append((ldb.Dn(self.samdb, domaindns_zone), forest[0]))
+
+ fsmo_dn = ldb.Dn(self.samdb, "CN=RID Manager$,CN=System," + self.samdb.domain_dn())
+ rid_master = get_fsmo_roleowner(self.samdb, fsmo_dn, "rid")
+ if ldb.Dn(self.samdb, self.samdb.get_dsServiceName()) == rid_master:
+ self.is_rid_master = True
+ else:
+ self.is_rid_master = False
+
+ # To get your rid set
+ # 1. Get server name
+ res = self.samdb.search(base=ldb.Dn(self.samdb, self.samdb.get_serverName()),
+ scope=ldb.SCOPE_BASE, attrs=["serverReference"])
+ # 2. Get server reference
+ self.server_ref_dn = ldb.Dn(self.samdb, res[0]['serverReference'][0].decode('utf8'))
+
+ # 3. Get RID Set
+ res = self.samdb.search(base=self.server_ref_dn,
+ scope=ldb.SCOPE_BASE, attrs=['rIDSetReferences'])
+ if "rIDSetReferences" in res[0]:
+ self.rid_set_dn = ldb.Dn(self.samdb, res[0]['rIDSetReferences'][0].decode('utf8'))
+ else:
+ self.rid_set_dn = None
+
+ self.compatibleFeatures = []
+ self.requiredFeatures = []
+
+ try:
+ res = self.samdb.search(scope=ldb.SCOPE_BASE,
+ base="@SAMBA_DSDB",
+ attrs=["compatibleFeatures",
+ "requiredFeatures"])
+ if "compatibleFeatures" in res[0]:
+ self.compatibleFeatures = res[0]["compatibleFeatures"]
+ if "requiredFeatures" in res[0]:
+ self.requiredFeatures = res[0]["requiredFeatures"]
+ except ldb.LdbError as e6:
+ (enum, estr) = e6.args
+ if enum != ldb.ERR_NO_SUCH_OBJECT:
+ raise
+ pass
def check_database(self, DN=None, scope=ldb.SCOPE_SUBTREE, controls=[], attrs=['*']):
'''perform a database check, returning the number of errors found'''
@@ -160,7 +229,6 @@ class dbcheck(object):
self.report('Checked %u objects (%u errors)' % (len(res), error_count))
return error_count
-
def check_deleted_objects_containers(self):
"""This function only fixes conflicts on the Deleted Objects
containers, not the attributes"""
@@ -188,7 +256,8 @@ class dbcheck(object):
"CN=Deleted Objects\\0ACNF:%s" % str(misc.GUID(guid)))
conflict_dn.add_base(nc)
- except ldb.LdbError, (enum, estr):
+ except ldb.LdbError as e2:
+ (enum, estr) = e2.args
if enum == ldb.ERR_NO_SUCH_OBJECT:
pass
else:
@@ -198,7 +267,8 @@ class dbcheck(object):
if conflict_dn is not None:
try:
self.samdb.rename(dn, conflict_dn, ["show_deleted:1", "relax:0", "show_recycled:1"])
- except ldb.LdbError, (enum, estr):
+ except ldb.LdbError as e1:
+ (enum, estr) = e1.args
self.report("Couldn't move old Deleted Objects placeholder: %s to %s: %s" % (dn, conflict_dn, estr))
return 1
@@ -216,7 +286,7 @@ class dbcheck(object):
listwko = []
proposed_objectguid = None
for o in wko:
- dsdb_dn = dsdb_Dn(self.samdb, o, dsdb.DSDB_SYNTAX_BINARY_DN)
+ dsdb_dn = dsdb_Dn(self.samdb, o.decode('utf8'), dsdb.DSDB_SYNTAX_BINARY_DN)
if self.is_deleted_objects_dn(dsdb_dn):
self.report("wellKnownObjects had duplicate Deleted Objects value %s" % o)
# We really want to put this back in the same spot
@@ -304,7 +374,9 @@ systemFlags: -1946157056%s""" % (dn, guid_suffix),
try:
controls = controls + ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK]
self.samdb.delete(dn, controls=controls)
- except Exception, err:
+ except Exception as err:
+ if self.in_transaction:
+ raise CommandError("%s : %s" % (msg, err))
self.report("%s : %s" % (msg, err))
return False
return True
@@ -316,7 +388,9 @@ systemFlags: -1946157056%s""" % (dn, guid_suffix),
try:
controls = controls + ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK]
self.samdb.modify(m, controls=controls, validate=validate)
- except Exception, err:
+ except Exception as err:
+ if self.in_transaction:
+ raise CommandError("%s : %s" % (msg, err))
self.report("%s : %s" % (msg, err))
return False
return True
@@ -333,7 +407,9 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
to_dn = to_rdn + to_base
controls = controls + ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK]
self.samdb.rename(from_dn, to_dn, controls=controls)
- except Exception, err:
+ except Exception as err:
+ if self.in_transaction:
+ raise CommandError("%s : %s" % (msg, err))
self.report("%s : %s" % (msg, err))
return False
return True
@@ -387,7 +463,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
m['value_%u' % i] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
if nval != '':
m['normv_%u' % i] = ldb.MessageElement(nval, ldb.FLAG_MOD_ADD,
- attrname)
+ attrname)
if self.do_modify(m, ["relax:0", "show_recycled:1"],
"Failed to normalise attribute %s" % attrname,
@@ -445,13 +521,21 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
"Failed to remove DN %s" % dn):
self.report("Removed DN %s" % dn)
- def err_deleted_dn(self, dn, attrname, val, dsdb_dn, correct_dn):
+ def err_deleted_dn(self, dn, attrname, val, dsdb_dn, correct_dn, remove_plausible=False):
"""handle a DN pointing to a deleted object"""
- self.report("ERROR: target DN is deleted for %s in object %s - %s" % (attrname, dn, val))
- self.report("Target GUID points at deleted DN %r" % str(correct_dn))
- if not self.confirm_all('Remove DN link?', 'remove_all_deleted_DN_links'):
- self.report("Not removing")
- return
+ if not remove_plausible:
+ self.report("ERROR: target DN is deleted for %s in object %s - %s" % (attrname, dn, val))
+ self.report("Target GUID points at deleted DN %r" % str(correct_dn))
+ if not self.confirm_all('Remove DN link?', 'remove_implausible_deleted_DN_links'):
+ self.report("Not removing")
+ return
+ else:
+ self.report("WARNING: target DN is deleted for %s in object %s - %s" % (attrname, dn, val))
+ self.report("Target GUID points at deleted DN %r" % str(correct_dn))
+ if not self.confirm_all('Remove stale DN link?', 'remove_plausible_deleted_DN_links'):
+ self.report("Not removing")
+ return
+
m = ldb.Message()
m.dn = dn
m['old_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
@@ -460,29 +544,73 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
"Failed to remove deleted DN attribute %s" % attrname):
self.report("Removed deleted DN on attribute %s" % attrname)
- def err_missing_dn_GUID(self, dn, attrname, val, dsdb_dn):
- """handle a missing target DN (both GUID and DN string form are missing)"""
+ def err_missing_target_dn_or_GUID(self, dn, attrname, val, dsdb_dn):
+ """handle a missing target DN (if specified, GUID form can't be found,
+ and otherwise DN string form can't be found)"""
# check if its a backlink
linkID, _ = self.get_attr_linkID_and_reverse_name(attrname)
if (linkID & 1 == 0) and str(dsdb_dn).find('\\0ADEL') == -1:
- self.report("Not removing dangling forward link")
- return
- self.err_deleted_dn(dn, attrname, val, dsdb_dn, dsdb_dn)
- def err_incorrect_dn_GUID(self, dn, attrname, val, dsdb_dn, errstr):
+ linkID, reverse_link_name \
+ = self.get_attr_linkID_and_reverse_name(attrname)
+ if reverse_link_name is not None:
+ self.report("WARNING: no target object found for GUID "
+ "component for one-way forward link "
+ "%s in object "
+ "%s - %s" % (attrname, dn, val))
+ self.report("Not removing dangling forward link")
+ return 0
+
+ nc_root = self.samdb.get_nc_root(dn)
+ target_nc_root = self.samdb.get_nc_root(dsdb_dn.dn)
+ if nc_root != target_nc_root:
+ # We don't bump the error count as Samba produces these
+ # in normal operation
+ self.report("WARNING: no target object found for GUID "
+ "component for cross-partition link "
+ "%s in object "
+ "%s - %s" % (attrname, dn, val))
+ self.report("Not removing dangling one-way "
+ "cross-partition link "
+ "(we might be mid-replication)")
+ return 0
+
+ # Due to our link handling one-way links pointing to
+ # missing objects are plausible.
+ #
+ # We don't bump the error count as Samba produces these
+ # in normal operation
+ self.report("WARNING: no target object found for GUID "
+ "component for DN value %s in object "
+ "%s - %s" % (attrname, dn, val))
+ self.err_deleted_dn(dn, attrname, val,
+ dsdb_dn, dsdb_dn, True)
+ return 0
+
+ # We bump the error count here, as we should have deleted this
+ self.report("ERROR: no target object found for GUID "
+ "component for link %s in object "
+ "%s - %s" % (attrname, dn, val))
+ self.err_deleted_dn(dn, attrname, val, dsdb_dn, dsdb_dn, False)
+ return 1
+
+ def err_missing_dn_GUID_component(self, dn, attrname, val, dsdb_dn, errstr):
"""handle a missing GUID extended DN component"""
self.report("ERROR: %s component for %s in object %s - %s" % (errstr, attrname, dn, val))
- controls=["extended_dn:1:1", "show_recycled:1"]
+ controls = ["extended_dn:1:1", "show_recycled:1"]
try:
res = self.samdb.search(base=str(dsdb_dn.dn), scope=ldb.SCOPE_BASE,
attrs=[], controls=controls)
- except ldb.LdbError, (enum, estr):
+ except ldb.LdbError as e7:
+ (enum, estr) = e7.args
self.report("unable to find object for DN %s - (%s)" % (dsdb_dn.dn, estr))
- self.err_missing_dn_GUID(dn, attrname, val, dsdb_dn)
+ if enum != ldb.ERR_NO_SUCH_OBJECT:
+ raise
+ self.err_missing_target_dn_or_GUID(dn, attrname, val, dsdb_dn)
return
if len(res) == 0:
self.report("unable to find object for DN %s" % dsdb_dn.dn)
- self.err_missing_dn_GUID(dn, attrname, val, dsdb_dn)
+ self.err_missing_target_dn_or_GUID(dn, attrname, val, dsdb_dn)
return
dsdb_dn.dn = res[0].dn
@@ -501,7 +629,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
def err_incorrect_binary_dn(self, dn, attrname, val, dsdb_dn, errstr):
"""handle an incorrect binary DN component"""
self.report("ERROR: %s binary component for %s in object %s - %s" % (errstr, attrname, dn, val))
- controls=["extended_dn:1:1", "show_recycled:1"]
+ controls = ["extended_dn:1:1", "show_recycled:1"]
if not self.confirm_all('Change DN to %s?' % str(dsdb_dn), 'fix_all_binary_dn'):
self.report("Not fixing %s" % errstr)
@@ -515,6 +643,24 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
"Failed to fix %s on attribute %s" % (errstr, attrname)):
self.report("Fixed %s on attribute %s" % (errstr, attrname))
+ def err_dn_string_component_old(self, dn, attrname, val, dsdb_dn, correct_dn):
+ """handle a DN string being incorrect"""
+ self.report("NOTE: old (due to rename or delete) DN string component for %s in object %s - %s" % (attrname, dn, val))
+ dsdb_dn.dn = correct_dn
+
+ if not self.confirm_all('Change DN to %s?' % str(dsdb_dn),
+ 'fix_all_old_dn_string_component_mismatch'):
+ self.report("Not fixing old string component")
+ return
+ m = ldb.Message()
+ m.dn = dn
+ m['old_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
+ m['new_value'] = ldb.MessageElement(str(dsdb_dn), ldb.FLAG_MOD_ADD, attrname)
+ if self.do_modify(m, ["show_recycled:1",
+ "local_oid:%s:1" % dsdb.DSDB_CONTROL_DBCHECK_FIX_LINK_DN_NAME],
+ "Failed to fix old DN string on attribute %s" % (attrname)):
+ self.report("Fixed old DN string on attribute %s" % (attrname))
+
def err_dn_component_target_mismatch(self, dn, attrname, val, dsdb_dn, correct_dn, mismatch_type):
"""handle a DN string being incorrect"""
self.report("ERROR: incorrect DN %s component for %s in object %s - %s" % (mismatch_type, attrname, dn, val))
@@ -568,10 +714,9 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
self.report("Not fixing missing backlink %s" % backlink_name)
return
m = ldb.Message()
- m.dn = obj.dn
- m['old_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
- m['new_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_ADD, attrname)
- if self.do_modify(m, ["show_recycled:1"],
+ m.dn = target_dn
+ m['new_value'] = ldb.MessageElement(val, ldb.FLAG_MOD_ADD, backlink_name)
+ if self.do_modify(m, ["show_recycled:1", "relax:0"],
"Failed to fix missing backlink %s" % backlink_name):
self.report("Fixed missing backlink %s" % (backlink_name))
@@ -589,18 +734,44 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
"Failed to fix incorrect RMD_FLAGS %u" % rmd_flags):
self.report("Fixed incorrect RMD_FLAGS %u" % (rmd_flags))
- def err_orphaned_backlink(self, obj, attrname, val, link_name, target_dn):
+ def err_orphaned_backlink(self, obj_dn, backlink_attr, backlink_val,
+ target_dn, forward_attr, forward_syntax,
+ check_duplicates=True):
'''handle a orphaned backlink value'''
- self.report("ERROR: orphaned backlink attribute '%s' in %s for link %s in %s" % (attrname, obj.dn, link_name, target_dn))
- if not self.confirm_all('Remove orphaned backlink %s' % link_name, 'fix_all_orphaned_backlinks'):
- self.report("Not removing orphaned backlink %s" % link_name)
+ if check_duplicates is True and self.has_duplicate_links(target_dn, forward_attr, forward_syntax):
+ self.report("WARNING: Keep orphaned backlink attribute " + \
+ "'%s' in '%s' for link '%s' in '%s'" % (
+ backlink_attr, obj_dn, forward_attr, target_dn))
+ return
+ self.report("ERROR: orphaned backlink attribute '%s' in %s for link %s in %s" % (backlink_attr, obj_dn, forward_attr, target_dn))
+ if not self.confirm_all('Remove orphaned backlink %s' % backlink_attr, 'fix_all_orphaned_backlinks'):
+ self.report("Not removing orphaned backlink %s" % backlink_attr)
return
m = ldb.Message()
- m.dn = obj.dn
- m['value'] = ldb.MessageElement(val, ldb.FLAG_MOD_DELETE, attrname)
+ m.dn = obj_dn
+ m['value'] = ldb.MessageElement(backlink_val, ldb.FLAG_MOD_DELETE, backlink_attr)
if self.do_modify(m, ["show_recycled:1", "relax:0"],
- "Failed to fix orphaned backlink %s" % link_name):
- self.report("Fixed orphaned backlink %s" % (link_name))
+ "Failed to fix orphaned backlink %s" % backlink_attr):
+ self.report("Fixed orphaned backlink %s" % (backlink_attr))
+
+ def err_recover_forward_links(self, obj, forward_attr, forward_vals):
+ '''handle a duplicate links value'''
+
+ self.report("RECHECK: 'Missing/Duplicate/Correct link' lines above for attribute '%s' in '%s'" % (forward_attr, obj.dn))
+
+ if not self.confirm_all("Commit fixes for (missing/duplicate) forward links in attribute '%s'" % forward_attr, 'recover_all_forward_links'):
+ self.report("Not fixing corrupted (missing/duplicate) forward links in attribute '%s' of '%s'" % (
+ forward_attr, obj.dn))
+ return
+ m = ldb.Message()
+ m.dn = obj.dn
+ m['value'] = ldb.MessageElement(forward_vals, ldb.FLAG_MOD_REPLACE, forward_attr)
+ if self.do_modify(m, ["local_oid:%s:1" % dsdb.DSDB_CONTROL_DBCHECK_FIX_DUPLICATE_LINKS],
+ "Failed to fix duplicate links in attribute '%s'" % forward_attr):
+ self.report("Fixed duplicate links in attribute '%s'" % (forward_attr))
+ duplicate_cache_key = "%s:%s" % (str(obj.dn), forward_attr)
+ assert duplicate_cache_key in self.duplicate_link_cache
+ self.duplicate_link_cache[duplicate_cache_key] = False
def err_no_fsmoRoleOwner(self, obj):
'''handle a missing fSMORoleOwner'''
@@ -626,7 +797,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
self.report('Not moving object %s into LostAndFound' % (obj.dn))
return
- keep_transaction = True
+ keep_transaction = False
self.samdb.transaction_start()
try:
nc_root = self.samdb.get_nc_root(obj.dn);
@@ -747,30 +918,279 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
controls=["show_deleted:0", "extended_dn:0", "reveal_internals:0"])
syntax_oid = self.samdb_schema.get_syntax_oid_from_lDAPDisplayName(attrname)
for val in res[0][attrname]:
- dsdb_dn = dsdb_Dn(self.samdb, val, syntax_oid)
+ dsdb_dn = dsdb_Dn(self.samdb, val.decode('utf8'), syntax_oid)
guid2 = dsdb_dn.dn.get_extended_component("GUID")
if guid == guid2:
return dsdb_dn
return None
+ def check_duplicate_links(self, obj, forward_attr, forward_syntax, forward_linkID, backlink_attr):
+ '''check a linked values for duplicate forward links'''
+ error_count = 0
+
+ duplicate_dict = dict()
+ unique_dict = dict()
+
+ # Only forward links can have this problem
+ if forward_linkID & 1:
+ # If we got the reverse, skip it
+ return (error_count, duplicate_dict, unique_dict)
+
+ if backlink_attr is None:
+ return (error_count, duplicate_dict, unique_dict)
+
+ duplicate_cache_key = "%s:%s" % (str(obj.dn), forward_attr)
+ if duplicate_cache_key not in self.duplicate_link_cache:
+ self.duplicate_link_cache[duplicate_cache_key] = False
+
+ for val in obj[forward_attr]:
+ dsdb_dn = dsdb_Dn(self.samdb, val.decode('utf8'), forward_syntax)
+
+ # all DNs should have a GUID component
+ guid = dsdb_dn.dn.get_extended_component("GUID")
+ if guid is None:
+ continue
+ guidstr = str(misc.GUID(guid))
+ keystr = guidstr + dsdb_dn.prefix
+ if keystr not in unique_dict:
+ unique_dict[keystr] = dsdb_dn
+ continue
+ error_count += 1
+ if keystr not in duplicate_dict:
+ duplicate_dict[keystr] = dict()
+ duplicate_dict[keystr]["keep"] = None
+ duplicate_dict[keystr]["delete"] = list()
+
+ # Now check for the highest RMD_VERSION
+ v1 = int(unique_dict[keystr].dn.get_extended_component("RMD_VERSION"))
+ v2 = int(dsdb_dn.dn.get_extended_component("RMD_VERSION"))
+ if v1 > v2:
+ duplicate_dict[keystr]["keep"] = unique_dict[keystr]
+ duplicate_dict[keystr]["delete"].append(dsdb_dn)
+ continue
+ if v1 < v2:
+ duplicate_dict[keystr]["keep"] = dsdb_dn
+ duplicate_dict[keystr]["delete"].append(unique_dict[keystr])
+ unique_dict[keystr] = dsdb_dn
+ continue
+ # Fallback to the highest RMD_LOCAL_USN
+ u1 = int(unique_dict[keystr].dn.get_extended_component("RMD_LOCAL_USN"))
+ u2 = int(dsdb_dn.dn.get_extended_component("RMD_LOCAL_USN"))
+ if u1 >= u2:
+ duplicate_dict[keystr]["keep"] = unique_dict[keystr]
+ duplicate_dict[keystr]["delete"].append(dsdb_dn)
+ continue
+ duplicate_dict[keystr]["keep"] = dsdb_dn
+ duplicate_dict[keystr]["delete"].append(unique_dict[keystr])
+ unique_dict[keystr] = dsdb_dn
+
+ if error_count != 0:
+ self.duplicate_link_cache[duplicate_cache_key] = True
+
+ return (error_count, duplicate_dict, unique_dict)
+
+ def has_duplicate_links(self, dn, forward_attr, forward_syntax):
+ '''check a linked values for duplicate forward links'''
+ error_count = 0
+
+ duplicate_cache_key = "%s:%s" % (str(dn), forward_attr)
+ if duplicate_cache_key in self.duplicate_link_cache:
+ return self.duplicate_link_cache[duplicate_cache_key]
+
+ forward_linkID, backlink_attr = self.get_attr_linkID_and_reverse_name(forward_attr)
+
+ attrs = [forward_attr]
+ controls = ["extended_dn:1:1", "reveal_internals:0"]
+
+ # check its the right GUID
+ try:
+ res = self.samdb.search(base=str(dn), scope=ldb.SCOPE_BASE,
+ attrs=attrs, controls=controls)
+ except ldb.LdbError as e8:
+ (enum, estr) = e8.args
+ if enum != ldb.ERR_NO_SUCH_OBJECT:
+ raise
+
+ return False
+
+ obj = res[0]
+ error_count, duplicate_dict, unique_dict = \
+ self.check_duplicate_links(obj, forward_attr, forward_syntax, forward_linkID, backlink_attr)
+
+ if duplicate_cache_key in self.duplicate_link_cache:
+ return self.duplicate_link_cache[duplicate_cache_key]
+
+ return False
+
+ def find_missing_forward_links_from_backlinks(self, obj,
+ forward_attr,
+ forward_syntax,
+ backlink_attr,
+ forward_unique_dict):
+ '''Find all backlinks linking to obj_guid_str not already in forward_unique_dict'''
+ missing_forward_links = []
+ error_count = 0
+
+ if backlink_attr is None:
+ return (missing_forward_links, error_count)
+
+ if forward_syntax != ldb.SYNTAX_DN:
+ self.report("Not checking for missing forward links for syntax: %s",
+ forward_syntax)
+ return (missing_forward_links, error_count)
+
+ if "sortedLinks" in self.compatibleFeatures:
+ self.report("Not checking for missing forward links because the db " + \
+ "has the sortedLinks feature")
+ return (missing_forward_links, error_count)
+
+ try:
+ obj_guid = obj['objectGUID'][0]
+ obj_guid_str = str(ndr_unpack(misc.GUID, obj_guid))
+ filter = "(%s=<GUID=%s>)" % (backlink_attr, obj_guid_str)
+
+ res = self.samdb.search(expression=filter,
+ scope=ldb.SCOPE_SUBTREE, attrs=["objectGUID"],
+ controls=["extended_dn:1:1",
+ "search_options:1:2",
+ "paged_results:1:1000"])
+ except ldb.LdbError as e9:
+ (enum, estr) = e9.args
+ raise
+
+ for r in res:
+ target_dn = dsdb_Dn(self.samdb, r.dn.extended_str(), forward_syntax)
+
+ guid = target_dn.dn.get_extended_component("GUID")
+ guidstr = str(misc.GUID(guid))
+ if guidstr in forward_unique_dict:
+ continue
+
+ # A valid forward link looks like this:
+ #
+ #     <GUID=...>;
+ #     <RMD_ADDTIME=...>;
+ #     <RMD_CHANGETIME=...>;
+ #     <RMD_FLAGS=0>;
+ #     <RMD_INVOCID=...>;
+ #     <RMD_LOCAL_USN=...>;
+ #     <RMD_ORIGINATING_USN=...>;
+ #     <RMD_VERSION=0>;
+ #     <SID=...>;
+ #     CN=unsorted-u8,CN=Users,DC=release-4-5-0-pre1,DC=samba,DC=corp
+ #
+ # Note that versions older than Samba 4.8 create
+ # links with RMD_VERSION=0.
+ #
+ # Try to get the local_usn and time from objectClass
+ # if possible and fallback to any other one.
+ repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
+ obj['replPropertyMetadata'][0])
+ for o in repl.ctr.array:
+ local_usn = o.local_usn
+ t = o.originating_change_time
+ if o.attid == drsuapi.DRSUAPI_ATTID_objectClass:
+ break
+
+ # We use a magic invocationID for restoring missing
+ # forward links to recover from bug #13228.
+ # This should allow some more future magic to fix the
+ # problem.
+ #
+ # It also means it looses the conflict resolution
+ # against almost every real invocation, if the
+ # version is also 0.
+ originating_invocid = misc.GUID("ffffffff-4700-4700-4700-000000b13228")
+ originating_usn = 1
+
+ rmd_addtime = t
+ rmd_changetime = t
+ rmd_flags = 0
+ rmd_invocid = originating_invocid
+ rmd_originating_usn = originating_usn
+ rmd_local_usn = local_usn
+ rmd_version = 0
+
+ target_dn.dn.set_extended_component("RMD_ADDTIME", str(rmd_addtime))
+ target_dn.dn.set_extended_component("RMD_CHANGETIME", str(rmd_changetime))
+ target_dn.dn.set_extended_component("RMD_FLAGS", str(rmd_flags))
+ target_dn.dn.set_extended_component("RMD_INVOCID", ndr_pack(rmd_invocid))
+ target_dn.dn.set_extended_component("RMD_ORIGINATING_USN", str(rmd_originating_usn))
+ target_dn.dn.set_extended_component("RMD_LOCAL_USN", str(rmd_local_usn))
+ target_dn.dn.set_extended_component("RMD_VERSION", str(rmd_version))
+
+ error_count += 1
+ missing_forward_links.append(target_dn)
+
+ return (missing_forward_links, error_count)
+
def check_dn(self, obj, attrname, syntax_oid):
'''check a DN attribute for correctness'''
error_count = 0
obj_guid = obj['objectGUID'][0]
+ linkID, reverse_link_name = self.get_attr_linkID_and_reverse_name(attrname)
+ if reverse_link_name is not None:
+ reverse_syntax_oid = self.samdb_schema.get_syntax_oid_from_lDAPDisplayName(reverse_link_name)
+ else:
+ reverse_syntax_oid = None
+
+ error_count, duplicate_dict, unique_dict = \
+ self.check_duplicate_links(obj, attrname, syntax_oid, linkID, reverse_link_name)
+
+ if len(duplicate_dict) != 0:
+
+ missing_forward_links, missing_error_count = \
+ self.find_missing_forward_links_from_backlinks(obj,
+ attrname, syntax_oid,
+ reverse_link_name,
+ unique_dict)
+ error_count += missing_error_count
+
+ forward_links = [dn for dn in unique_dict.values()]
+
+ if missing_error_count != 0:
+ self.report("ERROR: Missing and duplicate forward link values for attribute '%s' in '%s'" % (
+ attrname, obj.dn))
+ else:
+ self.report("ERROR: Duplicate forward link values for attribute '%s' in '%s'" % (attrname, obj.dn))
+ for m in missing_forward_links:
+ self.report("Missing link '%s'" % (m))
+ if not self.confirm_all("Schedule readding missing forward link for attribute %s" % attrname,
+ 'fix_all_missing_forward_links'):
+ self.err_orphaned_backlink(m.dn, reverse_link_name,
+ obj.dn.extended_str(), obj.dn,
+ attrname, syntax_oid,
+ check_duplicates=False)
+ continue
+ forward_links += [m]
+ for keystr in duplicate_dict.keys():
+ d = duplicate_dict[keystr]
+ for dd in d["delete"]:
+ self.report("Duplicate link '%s'" % dd)
+ self.report("Correct link '%s'" % d["keep"])
+
+ # We now construct the sorted dn values.
+ # They're sorted by the objectGUID of the target
+ # See dsdb_Dn.__cmp__()
+ vals = [str(dn) for dn in sorted(forward_links)]
+ self.err_recover_forward_links(obj, attrname, vals)
+ # We should continue with the fixed values
+ obj[attrname] = ldb.MessageElement(vals, 0, attrname)
+
for val in obj[attrname]:
- dsdb_dn = dsdb_Dn(self.samdb, val, syntax_oid)
+ dsdb_dn = dsdb_Dn(self.samdb, val.decode('utf8'), syntax_oid)
# all DNs should have a GUID component
guid = dsdb_dn.dn.get_extended_component("GUID")
if guid is None:
error_count += 1
- self.err_incorrect_dn_GUID(obj.dn, attrname, val, dsdb_dn,
- "missing GUID")
+ self.err_missing_dn_GUID_component(obj.dn, attrname, val, dsdb_dn,
+ "missing GUID")
continue
guidstr = str(misc.GUID(guid))
- attrs = ['isDeleted']
+ attrs = ['isDeleted', 'replPropertyMetaData']
if (str(attrname).lower() == 'msds-hasinstantiatedncs') and (obj.dn == self.ntds_dsa):
fixing_msDS_HasInstantiatedNCs = True
@@ -778,7 +1198,6 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
else:
fixing_msDS_HasInstantiatedNCs = False
- linkID, reverse_link_name = self.get_attr_linkID_and_reverse_name(attrname)
if reverse_link_name is not None:
attrs.append(reverse_link_name)
@@ -787,10 +1206,17 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
res = self.samdb.search(base="<GUID=%s>" % guidstr, scope=ldb.SCOPE_BASE,
attrs=attrs, controls=["extended_dn:1:1", "show_recycled:1",
"reveal_internals:0"
- ])
- except ldb.LdbError, (enum, estr):
- error_count += 1
- self.err_incorrect_dn_GUID(obj.dn, attrname, val, dsdb_dn, "incorrect GUID")
+ ])
+ except ldb.LdbError as e3:
+ (enum, estr) = e3.args
+ if enum != ldb.ERR_NO_SUCH_OBJECT:
+ raise
+
+ # We don't always want to
+ error_count += self.err_missing_target_dn_or_GUID(obj.dn,
+ attrname,
+ val,
+ dsdb_dn)
continue
if fixing_msDS_HasInstantiatedNCs:
@@ -798,7 +1224,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
dsdb_dn.binary = "%08X" % int(res[0]['instanceType'][0])
if str(dsdb_dn) != val:
- error_count +=1
+ error_count += 1
self.err_incorrect_binary_dn(obj.dn, attrname, val, dsdb_dn, "incorrect instanceType part of Binary DN")
continue
@@ -819,15 +1245,47 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
# the target DN is not allowed to be deleted, unless the target DN is the
# special Deleted Objects container
error_count += 1
- self.err_deleted_dn(obj.dn, attrname, val, dsdb_dn, res[0].dn)
+ local_usn = dsdb_dn.dn.get_extended_component("RMD_LOCAL_USN")
+ if local_usn:
+ if 'replPropertyMetaData' in res[0]:
+ repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
+ str(res[0]['replPropertyMetadata']))
+ found_data = False
+ for o in repl.ctr.array:
+ if o.attid == drsuapi.DRSUAPI_ATTID_isDeleted:
+ deleted_usn = o.local_usn
+ if deleted_usn >= int(local_usn):
+ # If the object was deleted after the link
+ # was last modified then, clean it up here
+ found_data = True
+ break
+
+ if found_data:
+ self.err_deleted_dn(obj.dn, attrname,
+ val, dsdb_dn, res[0].dn, True)
+ continue
+
+ self.err_deleted_dn(obj.dn, attrname, val, dsdb_dn, res[0].dn, False)
continue
- # check the DN matches in string form
- if str(res[0].dn) != str(dsdb_dn.dn):
- error_count += 1
- self.err_dn_component_target_mismatch(obj.dn, attrname, val, dsdb_dn,
- res[0].dn, "string")
- continue
+ # We should not check for incorrect
+ # components on deleted links, as these are allowed to
+ # go stale (we just need the GUID, not the name)
+ rmd_blob = dsdb_dn.dn.get_extended_component("RMD_FLAGS")
+ rmd_flags = 0
+ if rmd_blob is not None:
+ rmd_flags = int(rmd_blob)
+
+ # assert the DN matches in string form, where a reverse
+ # link exists, otherwise (below) offer to fix it as a non-error.
+ # The string form is essentially only kept for forensics,
+ # as we always re-resolve by GUID in normal operations.
+ if not rmd_flags & 1 and reverse_link_name is not None:
+ if str(res[0].dn) != str(dsdb_dn.dn):
+ error_count += 1
+ self.err_dn_component_target_mismatch(obj.dn, attrname, val, dsdb_dn,
+ res[0].dn, "string")
+ continue
if res[0].dn.get_extended_component("GUID") != dsdb_dn.dn.get_extended_component("GUID"):
error_count += 1
@@ -841,25 +1299,112 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
res[0].dn, "SID")
continue
+ # Only for non-links, not even forward-only links
+ # (otherwise this breaks repl_meta_data):
+ #
+ # Now we have checked the GUID and SID, offer to fix old
+ # DN strings as a non-error (DNs, not links so no
+ # backlink). Samba does not maintain this string
+ # otherwise, so we don't increment error_count.
+ if reverse_link_name is None:
+ if linkID == 0 and str(res[0].dn) != str(dsdb_dn.dn):
+ # Pass in the old/bad DN without the part,
+ # otherwise the LDB code will correct it on the way through
+ # (Note: we still want to preserve the DSDB DN prefix in the
+ # case of binary DNs)
+ bad_dn = dsdb_dn.prefix + dsdb_dn.dn.get_linearized()
+ self.err_dn_string_component_old(obj.dn, attrname, bad_dn,
+ dsdb_dn, res[0].dn)
+ continue
# check the reverse_link is correct if there should be one
- if reverse_link_name is not None:
- match_count = 0
- if reverse_link_name in res[0]:
- for v in res[0][reverse_link_name]:
- v_guid = dsdb_Dn(self.samdb, v).dn.get_extended_component("GUID")
- if v_guid == obj_guid:
- match_count += 1
- if match_count != 1:
- if target_is_deleted:
- error_count += 1
- if linkID & 1:
- self.err_missing_backlink(obj, attrname, val, reverse_link_name, dsdb_dn.dn)
- else:
- self.err_orphaned_backlink(obj, attrname, val, reverse_link_name, dsdb_dn.dn)
+ match_count = 0
+ if reverse_link_name in res[0]:
+ for v in res[0][reverse_link_name]:
+ v_dn = dsdb_Dn(self.samdb, v.decode('utf8'))
+ v_guid = v_dn.dn.get_extended_component("GUID")
+ v_blob = v_dn.dn.get_extended_component("RMD_FLAGS")
+ v_rmd_flags = 0
+ if v_blob is not None:
+ v_rmd_flags = int(v_blob)
+ if v_rmd_flags & 1:
+ continue
+ if v_guid == obj_guid:
+ match_count += 1
+
+ if match_count != 1:
+ if syntax_oid == dsdb.DSDB_SYNTAX_BINARY_DN or reverse_syntax_oid == dsdb.DSDB_SYNTAX_BINARY_DN:
+ if not linkID & 1:
+ # Forward binary multi-valued linked attribute
+ forward_count = 0
+ for w in obj[attrname]:
+ w_guid = dsdb_Dn(self.samdb, w.decode('utf8')).dn.get_extended_component("GUID")
+ if w_guid == guid:
+ forward_count += 1
+
+ if match_count == forward_count:
+ continue
+ expected_count = 0
+ for v in obj[attrname]:
+ v_dn = dsdb_Dn(self.samdb, v.decode('utf8'))
+ v_guid = v_dn.dn.get_extended_component("GUID")
+ v_blob = v_dn.dn.get_extended_component("RMD_FLAGS")
+ v_rmd_flags = 0
+ if v_blob is not None:
+ v_rmd_flags = int(v_blob)
+ if v_rmd_flags & 1:
continue
+ if v_guid == guid:
+ expected_count += 1
+ if match_count == expected_count:
+ continue
+
+ diff_count = expected_count - match_count
+
+ if linkID & 1:
+ # If there's a backward link on binary multi-valued linked attribute,
+ # let the check on the forward link remedy the value.
+ # UNLESS, there is no forward link detected.
+ if match_count == 0:
+ error_count += 1
+ self.err_orphaned_backlink(obj.dn, attrname,
+ val, dsdb_dn.dn,
+ reverse_link_name,
+ reverse_syntax_oid)
+ continue
+ # Only warn here and let the forward link logic fix it.
+ self.report("WARNING: Link (back) mismatch for '%s' (%d) on '%s' to '%s' (%d) on '%s'" % (
+ attrname, expected_count, str(obj.dn),
+ reverse_link_name, match_count, str(dsdb_dn.dn)))
+ continue
+ assert not target_is_deleted
+
+ self.report("ERROR: Link (forward) mismatch for '%s' (%d) on '%s' to '%s' (%d) on '%s'" % (
+ attrname, expected_count, str(obj.dn),
+ reverse_link_name, match_count, str(dsdb_dn.dn)))
+
+ # Loop until the difference between the forward and
+ # the backward links is resolved.
+ while diff_count != 0:
+ error_count += 1
+ if diff_count > 0:
+ if match_count > 0 or diff_count > 1:
+ # TODO no method to fix these right now
+ self.report("ERROR: Can't fix missing "
+ "multi-valued backlinks on %s" % str(dsdb_dn.dn))
+ break
+ self.err_missing_backlink(obj, attrname,
+ obj.dn.extended_str(),
+ reverse_link_name,
+ dsdb_dn.dn)
+ diff_count -= 1
+ else:
+ self.err_orphaned_backlink(res[0].dn, reverse_link_name,
+ obj.dn.extended_str(), obj.dn,
+ attrname, syntax_oid)
+ diff_count += 1
return error_count
@@ -905,11 +1450,14 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
return (set_att, list_attid, wrong_attids)
- def fix_metadata(self, dn, attr):
+ def fix_metadata(self, obj, attr):
'''re-write replPropertyMetaData elements for a single attribute for a
object. This is used to fix missing replPropertyMetaData elements'''
+ guid_str = str(ndr_unpack(misc.GUID, obj['objectGUID'][0]))
+        dn = ldb.Dn(self.samdb, "<GUID=%s>" % guid_str)
res = self.samdb.search(base = dn, scope=ldb.SCOPE_BASE, attrs = [attr],
- controls = ["search_options:1:2", "show_recycled:1"])
+ controls = ["search_options:1:2",
+ "show_recycled:1"])
msg = res[0]
nmsg = ldb.Message()
nmsg.dn = dn
@@ -1035,7 +1583,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
cls = None
try:
cls = obj["objectClass"][-1]
- except KeyError, e:
+ except KeyError as e:
pass
if cls is None:
@@ -1221,7 +1769,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
# the correct values are above 0x80000000) values first and
# remove the 'second' value we see.
for o in reversed(ctr.array):
- print "%s: 0x%08x" % (dn, o.attid)
+ print("%s: 0x%08x" % (dn, o.attid))
att = self.samdb_schema.get_lDAPDisplayName_by_attid(o.attid)
if att.lower() in set_att:
self.report('ERROR: duplicate attributeID values for %s in %s on %s\n' % (att, attr, dn))
@@ -1291,9 +1839,9 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
nmsg.dn = dn
nmsg[attr] = ldb.MessageElement(replBlob, ldb.FLAG_MOD_REPLACE, attr)
if self.do_modify(nmsg, ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK_MODIFY_RO_REPLICA,
- "local_oid:1.3.6.1.4.1.7165.4.3.14:0",
- "local_oid:1.3.6.1.4.1.7165.4.3.25:0"],
- "Failed to fix attribute %s" % attr):
+ "local_oid:1.3.6.1.4.1.7165.4.3.14:0",
+ "local_oid:1.3.6.1.4.1.7165.4.3.25:0"],
+ "Failed to fix attribute %s" % attr):
self.report("Fixed attribute '%s' of '%s'\n" % (attr, dn))
@@ -1355,6 +1903,23 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
"Failed to fix Deleted Objects container %s" % dn):
self.report("Fixed Deleted Objects container '%s'\n" % (dn))
+ def err_replica_locations(self, obj, cross_ref, attr):
+ nmsg = ldb.Message()
+ nmsg.dn = cross_ref
+ target = self.samdb.get_dsServiceName()
+
+ if self.samdb.am_rodc():
+            self.report('Not fixing %s on %s for the RODC' % (attr, obj.dn))
+ return
+
+ if not self.confirm_all('Add yourself to the replica locations for %s?'
+ % (obj.dn), 'fix_replica_locations'):
+ self.report('Not fixing missing/incorrect attributes on %s\n' % (obj.dn))
+ return
+
+ nmsg[attr] = ldb.MessageElement(target, ldb.FLAG_MOD_ADD, attr)
+ if self.do_modify(nmsg, [], "Failed to add %s for %s" % (attr, obj.dn)):
+ self.report("Fixed %s for %s" % (attr, obj.dn))
def is_fsmo_role(self, dn):
if dn == self.samdb.domain_dn:
@@ -1377,7 +1942,8 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
instancetype |= dsdb.INSTANCE_TYPE_IS_NC_HEAD
try:
self.samdb.search(base=dn.parent(), scope=ldb.SCOPE_BASE, attrs=[], controls=["show_recycled:1"])
- except ldb.LdbError, (enum, estr):
+ except ldb.LdbError as e4:
+ (enum, estr) = e4.args
if enum != ldb.ERR_NO_SUCH_OBJECT:
raise
else:
@@ -1416,10 +1982,21 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
attrs.append(dn.get_rdn_name())
attrs.append("isDeleted")
attrs.append("systemFlags")
+ need_replPropertyMetaData = False
if '*' in attrs:
- attrs.append("replPropertyMetaData")
+ need_replPropertyMetaData = True
else:
- attrs.append("objectGUID")
+ for a in attrs:
+ linkID, _ = self.get_attr_linkID_and_reverse_name(a)
+ if linkID == 0:
+ continue
+ if linkID & 1:
+ continue
+ need_replPropertyMetaData = True
+ break
+ if need_replPropertyMetaData:
+ attrs.append("replPropertyMetaData")
+ attrs.append("objectGUID")
try:
sd_flags = 0
@@ -1437,7 +2014,8 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
"reveal_internals:0",
],
attrs=attrs)
- except ldb.LdbError, (enum, estr):
+ except ldb.LdbError as e10:
+ (enum, estr) = e10.args
if enum == ldb.ERR_NO_SUCH_OBJECT:
if self.in_transaction:
self.report("ERROR: Object %s disappeared during check" % dn)
@@ -1519,7 +2097,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
if len(set_attrs_from_md) < len(list_attid_from_md) \
or len(wrong_attids) > 0 \
or sorted(list_attid_from_md) != list_attid_from_md:
- error_count +=1
+ error_count += 1
self.err_replmetadata_incorrect_attid(dn, attrname, obj[attrname], wrong_attids)
else:
@@ -1633,7 +2211,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
# special handling for some specific attribute types
try:
syntax_oid = self.samdb_schema.get_syntax_oid_from_lDAPDisplayName(attrname)
- except Exception, msg:
+ except Exception as msg:
self.err_unknown_attribute(obj, attrname)
error_count += 1
continue
@@ -1646,8 +2224,8 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
and not linkID):
set_attrs_seen.add(str(attrname).lower())
- if syntax_oid in [ dsdb.DSDB_SYNTAX_BINARY_DN, dsdb.DSDB_SYNTAX_OR_NAME,
- dsdb.DSDB_SYNTAX_STRING_DN, ldb.SYNTAX_DN ]:
+ if syntax_oid in [dsdb.DSDB_SYNTAX_BINARY_DN, dsdb.DSDB_SYNTAX_OR_NAME,
+ dsdb.DSDB_SYNTAX_STRING_DN, ldb.SYNTAX_DN]:
# it's some form of DN, do specialised checking on those
error_count += self.check_dn(obj, attrname, syntax_oid)
else:
@@ -1734,7 +2312,7 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
if not self.confirm_all("Fix missing replPropertyMetaData element '%s'" % att, 'fix_all_metadata'):
self.report("Not fixing missing replPropertyMetaData element '%s'" % att)
continue
- self.fix_metadata(dn, att)
+ self.fix_metadata(obj, att)
if self.is_fsmo_role(dn):
if "fSMORoleOwner" not in obj and ("*" in attrs or "fsmoroleowner" in map(str.lower, attrs)):
@@ -1745,7 +2323,8 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
if dn != self.samdb.get_root_basedn() and str(dn.parent()) not in self.dn_set:
res = self.samdb.search(base=dn.parent(), scope=ldb.SCOPE_BASE,
controls=["show_recycled:1", "show_deleted:1"])
- except ldb.LdbError, (enum, estr):
+ except ldb.LdbError as e11:
+ (enum, estr) = e11.args
if enum == ldb.ERR_NO_SUCH_OBJECT:
self.err_missing_parent(obj)
error_count += 1
@@ -1757,6 +2336,136 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
self.err_deleted_deleted_objects(obj)
error_count += 1
+ for (dns_part, msg) in self.dns_partitions:
+ if dn == dns_part and 'repsFrom' in obj:
+ location = "msDS-NC-Replica-Locations"
+ if self.samdb.am_rodc():
+ location = "msDS-NC-RO-Replica-Locations"
+
+ if location not in msg:
+ # There are no replica locations!
+ self.err_replica_locations(obj, msg.dn, location)
+ error_count += 1
+ continue
+
+ found = False
+ for loc in msg[location]:
+ if loc == self.samdb.get_dsServiceName():
+ found = True
+ if not found:
+ # This DC is not in the replica locations
+ self.err_replica_locations(obj, msg.dn, location)
+ error_count += 1
+
+ if dn == self.server_ref_dn:
+ # Check we have a valid RID Set
+ if "*" in attrs or "rIDSetReferences" in attrs:
+ if "rIDSetReferences" not in obj:
+ # NO RID SET reference
+ # We are RID master, allocate it.
+ error_count += 1
+
+ if self.is_rid_master:
+ # Allocate a RID Set
+ if self.confirm_all('Allocate the missing RID set for RID master?',
+ 'fix_missing_rid_set_master'):
+
+ # We don't have auto-transaction logic on
+ # extended operations, so we have to do it
+ # here.
+
+ self.samdb.transaction_start()
+
+ try:
+ self.samdb.create_own_rid_set()
+
+ except:
+ self.samdb.transaction_cancel()
+ raise
+
+ self.samdb.transaction_commit()
+
+
+ elif not self.samdb.am_rodc():
+ self.report("No RID Set found for this server: %s, and we are not the RID Master (so can not self-allocate)" % dn)
+
+
+ # Check some details of our own RID Set
+ if dn == self.rid_set_dn:
+ res = self.samdb.search(base=self.rid_set_dn, scope=ldb.SCOPE_BASE,
+ attrs=["rIDAllocationPool",
+ "rIDPreviousAllocationPool",
+ "rIDUsedPool",
+ "rIDNextRID"])
+ if "rIDAllocationPool" not in res[0]:
+ self.report("No rIDAllocationPool found in %s" % dn)
+ error_count += 1
+ else:
+ next_pool = int(res[0]["rIDAllocationPool"][0])
+
+ high = (0xFFFFFFFF00000000 & next_pool) >> 32
+ low = 0x00000000FFFFFFFF & next_pool
+
+ if high <= low:
+ self.report("Invalid RID set %d-%s, %d > %d!" % (low, high, low, high))
+ error_count += 1
+
+ if "rIDNextRID" in res[0]:
+ next_free_rid = int(res[0]["rIDNextRID"][0])
+ else:
+ next_free_rid = 0
+
+ if next_free_rid == 0:
+ next_free_rid = low
+ else:
+ next_free_rid += 1
+
+ # Check the remainder of this pool for conflicts. If
+ # ridalloc_allocate_rid() moves to a new pool, this
+ # will be above high, so we will stop.
+ while next_free_rid <= high:
+ sid = "%s-%d" % (self.samdb.get_domain_sid(), next_free_rid)
+ try:
+                        res = self.samdb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
+ attrs=[])
+ except ldb.LdbError as e:
+ (enum, estr) = e.args
+ if enum != ldb.ERR_NO_SUCH_OBJECT:
+ raise
+ res = None
+ if res is not None:
+ self.report("SID %s for %s conflicts with our current RID set in %s" % (sid, res[0].dn, dn))
+ error_count += 1
+
+ if self.confirm_all('Fix conflict between SID %s and RID pool in %s by allocating a new RID?'
+ % (sid, dn),
+ 'fix_sid_rid_set_conflict'):
+ self.samdb.transaction_start()
+
+ # This will burn RIDs, which will move
+ # past the conflict. We then check again
+ # to see if the new RID conflicts, until
+ # the end of the current pool. We don't
+ # look at the next pool to avoid burning
+ # all RIDs in one go in some strange
+ # failure case.
+ try:
+ while True:
+ allocated_rid = self.samdb.allocate_rid()
+ if allocated_rid >= next_free_rid:
+ next_free_rid = allocated_rid + 1
+ break
+ except:
+ self.samdb.transaction_cancel()
+ raise
+
+ self.samdb.transaction_commit()
+ else:
+ break
+ else:
+ next_free_rid += 1
+
+
return error_count
################################################################
@@ -1776,14 +2485,14 @@ newSuperior: %s""" % (str(from_dn), str(to_rdn), str(to_base)))
# check that the dsServiceName is in GUID form
if not 'dsServiceName' in obj:
self.report('ERROR: dsServiceName missing in @ROOTDSE')
- return error_count+1
+ return error_count + 1
         if not obj['dsServiceName'][0].startswith('<GUID='):