from samba.netcmd import CommandError
from samba.netcmd.fsmo import get_fsmo_roleowner
def dump_attr_values(vals):
    """Stringify a sequence of attribute values as a comma-separated list.

    vals is a sequence of ldb.bytes objects (a subclass of 'bytes' in
    Python 3, plain str in Python 2).  For display we prefer str(), so
    that tests which compare attribute values logged to stdout against
    expected results files keep passing; when str() raises
    UnicodeDecodeError (non-UTF-8 binary data) fall back to repr().
    """
    result = []
    for value in vals:
        try:
            result.append(str(value))
        except UnicodeDecodeError:
            result.append(repr(value))
    # Join once at the end: the previous in-loop "," + result form
    # misplaced the separator (e.g. ["a","b"] rendered as ",ab").
    return ",".join(result)
class dbcheck(object):
"""check a SAM database for errors"""
def __init__(self, samdb, samdb_schema=None, verbose=False, fix=False,
yes=False, quiet=False, in_transaction=False,
- reset_well_known_acls=False):
+ quick_membership_checks=False,
+ reset_well_known_acls=False,
+ check_expired_tombstones=False):
self.samdb = samdb
self.dict_oid_name = None
self.samdb_schema = (samdb_schema or samdb)
self.fix_all_string_dn_component_mismatch = False
self.fix_all_GUID_dn_component_mismatch = False
self.fix_all_SID_dn_component_mismatch = False
+ self.fix_all_SID_dn_component_missing = False
self.fix_all_old_dn_string_component_mismatch = False
self.fix_all_metadata = False
self.fix_time_metadata = False
self.fix_utf8_userparameters = False
self.fix_doubled_userparameters = False
self.fix_sid_rid_set_conflict = False
+ self.quick_membership_checks = quick_membership_checks
self.reset_well_known_acls = reset_well_known_acls
+ self.check_expired_tombstones = check_expired_tombstones
+ self.expired_tombstones = 0
self.reset_all_well_known_acls = False
self.in_transaction = in_transaction
self.infrastructure_dn = ldb.Dn(samdb, "CN=Infrastructure," + samdb.domain_dn())
self.fix_missing_deleted_objects = False
self.fix_replica_locations = False
self.fix_missing_rid_set_master = False
+ self.fix_changes_after_deletion_bug = False
self.dn_set = set()
self.link_id_cache = {}
else:
self.rid_set_dn = None
+ ntds_service_dn = "CN=Directory Service,CN=Windows NT,CN=Services,%s" % \
+ self.samdb.get_config_basedn().get_linearized()
+ res = samdb.search(base=ntds_service_dn,
+ scope=ldb.SCOPE_BASE,
+ expression="(objectClass=nTDSService)",
+ attrs=["tombstoneLifetime"])
+ if "tombstoneLifetime" in res[0]:
+ self.tombstoneLifetime = int(res[0]["tombstoneLifetime"][0])
+ else:
+ self.tombstoneLifetime = 180
+
self.compatibleFeatures = []
self.requiredFeatures = []
raise
pass
- def check_database(self, DN=None, scope=ldb.SCOPE_SUBTREE, controls=[], attrs=['*']):
+ def check_database(self, DN=None, scope=ldb.SCOPE_SUBTREE, controls=None,
+ attrs=None):
'''perform a database check, returning the number of errors found'''
res = self.samdb.search(base=DN, scope=scope, attrs=['dn'], controls=controls)
self.report('Checking %u objects' % len(res))
if DN is None:
error_count += self.check_rootdse()
+ if self.expired_tombstones > 0:
+ self.report("NOTICE: found %d expired tombstones, "
+ "'samba' will remove them daily, "
+ "'samba-tool domain tombstones expunge' "
+ "would do that immediately." % (
+ self.expired_tombstones))
+
if error_count != 0 and not self.fix:
self.report("Please use --fix to fix these errors")
# as the original one, so that on replication we
# merge, rather than conflict.
proposed_objectguid = dsdb_dn.dn.get_extended_component("GUID")
- listwko.append(o)
+ listwko.append(str(o))
if proposed_objectguid is not None:
guid_suffix = "\nobjectGUID: %s" % str(misc.GUID(proposed_objectguid))
def do_modify(self, m, controls, msg, validate=True):
'''perform a modify with optional verbose output'''
+ controls = controls + ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK]
if self.verbose:
self.report(self.samdb.write_ldif(m, ldb.CHANGETYPE_MODIFY))
+ self.report("controls: %r" % controls)
try:
- controls = controls + ["local_oid:%s:0" % dsdb.DSDB_CONTROL_DBCHECK]
self.samdb.modify(m, controls=controls, validate=validate)
except Exception as err:
if self.in_transaction:
def err_duplicate_values(self, dn, attrname, dup_values, values):
'''fix attribute normalisation errors'''
self.report("ERROR: Duplicate values for attribute '%s' in '%s'" % (attrname, dn))
- self.report("Values contain a duplicate: [%s]/[%s]!" % (','.join(dup_values), ','.join(values)))
+ self.report("Values contain a duplicate: [%s]/[%s]!" % (','.join(dump_attr_values(dup_values)), ','.join(dump_attr_values(values))))
if not self.confirm_all("Fix duplicates for '%s' from '%s'?" % (attrname, dn), 'fix_all_duplicates'):
self.report("Not fixing attribute '%s'" % attrname)
return
def err_missing_target_dn_or_GUID(self, dn, attrname, val, dsdb_dn):
"""handle a missing target DN (if specified, GUID form can't be found,
and otherwise DN string form can't be found)"""
+
+ # Don't change anything if the object itself is deleted
+ if str(dn).find('\\0ADEL') != -1:
+ # We don't bump the error count as Samba produces these
+ # in normal operation
+ self.report("WARNING: no target object found for GUID "
+ "component link %s in deleted object "
+ "%s - %s" % (attrname, dn, val))
+ self.report("Not removing dangling one-way "
+ "link on deleted object "
+ "(tombstone garbage collection in progress?)")
+ return 0
+
# check if its a backlink
linkID, _ = self.get_attr_linkID_and_reverse_name(attrname)
if (linkID & 1 == 0) and str(dsdb_dn).find('\\0ADEL') == -1:
def err_incorrect_binary_dn(self, dn, attrname, val, dsdb_dn, errstr):
"""handle an incorrect binary DN component"""
self.report("ERROR: %s binary component for %s in object %s - %s" % (errstr, attrname, dn, val))
- controls = ["extended_dn:1:1", "show_recycled:1"]
if not self.confirm_all('Change DN to %s?' % str(dsdb_dn), 'fix_all_binary_dn'):
self.report("Not fixing %s" % errstr)
"Failed to fix incorrect DN %s on attribute %s" % (mismatch_type, attrname)):
self.report("Fixed incorrect DN %s on attribute %s" % (mismatch_type, attrname))
+ def err_dn_component_missing_target_sid(self, dn, attrname, val, dsdb_dn, target_sid_blob):
+ """handle a DN string being incorrect"""
+ self.report("ERROR: missing DN SID component for %s in object %s - %s" % (attrname, dn, val))
+
+ if len(dsdb_dn.prefix) != 0:
+ self.report("Not fixing missing DN SID on DN+BINARY or DN+STRING")
+ return
+
+ correct_dn = ldb.Dn(self.samdb, dsdb_dn.dn.extended_str())
+ correct_dn.set_extended_component("SID", target_sid_blob)
+
+ if not self.confirm_all('Change DN to %s?' % correct_dn.extended_str(),
+ 'fix_all_SID_dn_component_missing'):
+ self.report("Not fixing missing DN SID component")
+ return
+
+ target_guid_blob = correct_dn.get_extended_component("GUID")
+ guid_sid_dn = ldb.Dn(self.samdb, "")
+ guid_sid_dn.set_extended_component("GUID", target_guid_blob)
+ guid_sid_dn.set_extended_component("SID", target_sid_blob)
+
+ m = ldb.Message()
+ m.dn = dn
+ m['new_value'] = ldb.MessageElement(guid_sid_dn.extended_str(), ldb.FLAG_MOD_ADD, attrname)
+ controls = [
+ "show_recycled:1",
+ "local_oid:%s:1" % dsdb.DSDB_CONTROL_DBCHECK_FIX_LINK_DN_SID
+ ]
+ if self.do_modify(m, controls,
+ "Failed to ADD missing DN SID on attribute %s" % (attrname)):
+ self.report("Fixed missing DN SID on attribute %s" % (attrname))
+
def err_unknown_attribute(self, obj, attrname):
'''handle an unknown attribute error'''
self.report("ERROR: unknown attribute '%s' in %s" % (attrname, obj.dn))
res = self.samdb.search("",
scope=ldb.SCOPE_BASE, attrs=["dsServiceName"])
assert len(res) == 1
- serviceName = res[0]["dsServiceName"][0]
+ serviceName = str(res[0]["dsServiceName"][0])
if not self.confirm_all('Sieze role %s onto current DC by adding fSMORoleOwner=%s' % (obj.dn, serviceName), 'seize_fsmo_role'):
self.report("Not Siezing role %s onto current DC by adding fSMORoleOwner=%s" % (obj.dn, serviceName))
return
else:
self.samdb.transaction_cancel()
- def err_wrong_dn(self, obj, new_dn, rdn_attr, rdn_val, name_val):
+ def err_wrong_dn(self, obj, new_dn, rdn_attr, rdn_val, name_val, controls):
'''handle a wrong dn'''
new_rdn = ldb.Dn(self.samdb, str(new_dn))
self.report("Not renaming %s to %s" % (obj.dn, new_dn))
return
- if self.do_rename(obj.dn, new_rdn, new_parent, ["show_recycled:1", "relax:0"],
+ if self.do_rename(obj.dn, new_rdn, new_parent, controls,
"Failed to rename object %s into %s" % (obj.dn, new_dn)):
self.report("Renamed %s into %s" % (obj.dn, new_dn))
m = ldb.Message()
m.dn = obj.dn
- m['value'] = ldb.MessageElement(obj[attrname][0].decode('utf-16-le').decode('utf-16-le').encode('utf-16-le'),
+ # m['value'] = ldb.MessageElement(obj[attrname][0].decode('utf-16-le').decode('utf-16-le').encode('utf-16-le'),
+ # hmm the above old python2 code doesn't make sense to me and cannot
+ # work in python3 because a string doesn't have a decode method.
+ # However in python2 for some unknown reason this double decode
+ # followed by encode seems to result in what looks like utf8.
+ # In python2 just .decode('utf-16-le').encode('utf-16-le') does nothing
+ # but trigger the 'double UTF16 encoded' condition again :/
+ #
+ # In python2 and python3 value.decode('utf-16-le').encode('utf8') seems
+ # to do the trick and work as expected.
+ m['value'] = ldb.MessageElement(obj[attrname][0].decode('utf-16-le').encode('utf8'),
ldb.FLAG_MOD_REPLACE, 'userParameters')
+
if self.do_modify(m, [],
"Failed to correct doubled-UTF16 encoded userParameters on %s by converting" % (obj.dn)):
self.report("Corrected doubled-UTF16 encoded userParameters on %s by converting" % (obj.dn))
return (missing_forward_links, error_count)
if forward_syntax != ldb.SYNTAX_DN:
- self.report("Not checking for missing forward links for syntax: %s",
+ self.report("Not checking for missing forward links for syntax: %s" %
forward_syntax)
return (missing_forward_links, error_count)
else:
reverse_syntax_oid = None
- error_count, duplicate_dict, unique_dict = \
- self.check_duplicate_links(obj, attrname, syntax_oid, linkID, reverse_link_name)
+ is_member_link = attrname in ("member", "memberOf")
+ if is_member_link and self.quick_membership_checks:
+ duplicate_dict = {}
+ else:
+ error_count, duplicate_dict, unique_dict = \
+ self.check_duplicate_links(obj, attrname, syntax_oid,
+ linkID, reverse_link_name)
if len(duplicate_dict) != 0:
dsdb_dn.prefix = "B:8:%08X:" % int(res[0]['instanceType'][0])
dsdb_dn.binary = "%08X" % int(res[0]['instanceType'][0])
- if str(dsdb_dn) != val:
+ if str(dsdb_dn) != str(val):
error_count += 1
self.err_incorrect_binary_dn(obj.dn, attrname, val, dsdb_dn, "incorrect instanceType part of Binary DN")
continue
# now we have two cases - the source object might or might not be deleted
- is_deleted = 'isDeleted' in obj and obj['isDeleted'][0].upper() == 'TRUE'
- target_is_deleted = 'isDeleted' in res[0] and res[0]['isDeleted'][0].upper() == 'TRUE'
+ is_deleted = 'isDeleted' in obj and str(obj['isDeleted'][0]).upper() == 'TRUE'
+ target_is_deleted = 'isDeleted' in res[0] and str(res[0]['isDeleted'][0]).upper() == 'TRUE'
- if is_deleted and not obj.dn in self.deleted_objects_containers and linkID:
+ if is_deleted and obj.dn not in self.deleted_objects_containers and linkID:
# A fully deleted object should not have any linked
# attributes. (MS-ADTS 3.1.1.5.5.1.1 Tombstone
# Requirements and 3.1.1.5.5.1.3 Recycled-Object
if local_usn:
if 'replPropertyMetaData' in res[0]:
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
- str(res[0]['replPropertyMetadata']))
+ res[0]['replPropertyMetadata'][0])
found_data = False
for o in repl.ctr.array:
if o.attid == drsuapi.DRSUAPI_ATTID_isDeleted:
res[0].dn, "GUID")
continue
- if res[0].dn.get_extended_component("SID") != dsdb_dn.dn.get_extended_component("SID"):
+ target_sid = res[0].dn.get_extended_component("SID")
+ link_sid = dsdb_dn.dn.get_extended_component("SID")
+ if link_sid is None and target_sid is not None:
+ error_count += 1
+ self.err_dn_component_missing_target_sid(obj.dn, attrname, val,
+ dsdb_dn, target_sid)
+ continue
+ if link_sid != target_sid:
error_count += 1
self.err_dn_component_target_mismatch(obj.dn, attrname, val, dsdb_dn,
res[0].dn, "SID")
dsdb_dn, res[0].dn)
continue
+ if is_member_link and self.quick_membership_checks:
+ continue
+
# check the reverse_link is correct if there should be one
match_count = 0
if reverse_link_name in res[0]:
return error_count
+ def find_repl_attid(self, repl, attid):
+ for o in repl.ctr.array:
+ if o.attid == attid:
+ return o
+
+ return None
+
def get_originating_time(self, val, attid):
'''Read metadata properties and return the originating time for
a given attributeId.
:return: the originating time or 0 if not found
'''
- repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob, str(val))
- obj = repl.ctr
-
- for o in repl.ctr.array:
- if o.attid == attid:
- return o.originating_change_time
-
+ repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob, val)
+ o = self.find_repl_attid(repl, attid)
+ if o is not None:
+ return o.originating_change_time
return 0
def process_metadata(self, dn, val):
list_attid = []
in_schema_nc = dn.is_child_of(self.schema_dn)
- repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob, str(val))
- obj = repl.ctr
+ repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob, val)
for o in repl.ctr.array:
att = self.samdb_schema.get_lDAPDisplayName_by_attid(o.attid)
dn = ldb.Dn(self.samdb, "<GUID=%s>" % guid_str)
res = self.samdb.search(base=dn, scope=ldb.SCOPE_BASE, attrs=[attr],
controls=["search_options:1:2",
- "show_recycled:1"])
+ "show_recycled:1"])
msg = res[0]
nmsg = ldb.Message()
nmsg.dn = dn
sd_attr = "nTSecurityDescriptor"
sd_val = obj[sd_attr]
- sd = ndr_unpack(security.descriptor, str(sd_val))
+ sd = ndr_unpack(security.descriptor, sd_val[0])
- is_deleted = 'isDeleted' in obj and obj['isDeleted'][0].upper() == 'TRUE'
+ is_deleted = 'isDeleted' in obj and str(obj['isDeleted'][0]).upper() == 'TRUE'
if is_deleted:
# we don't fix deleted objects
return (sd, None)
attrs=["isDeleted", "objectClass"],
controls=["show_recycled:1"])
o = res[0]
- is_deleted = 'isDeleted' in o and o['isDeleted'][0].upper() == 'TRUE'
+ is_deleted = 'isDeleted' in o and str(o['isDeleted'][0]).upper() == 'TRUE'
if is_deleted:
# we don't fix deleted objects
return (sd, None)
'''re-write the SD due to not matching the default (optional mode for fixing an incorrect provision)'''
sd_attr = "nTSecurityDescriptor"
sd_val = ndr_pack(sd)
- sd_old_val = ndr_pack(sd_old)
sd_flags = security.SECINFO_DACL | security.SECINFO_SACL
if sd.owner_sid is not None:
sd_flags |= security.SECINFO_OWNER
self.report("Fixed attribute '%s' of '%s'\n" % (sd_attr, dn))
self.samdb.set_session_info(self.system_session_info)
+ def is_expired_tombstone(self, dn, repl_val):
+ if self.check_expired_tombstones:
+ # This is not the default, it's just
+ # used to keep dbcheck tests work with
+ # old static provision dumps
+ return False
+
+ repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob, repl_val)
+
+ isDeleted = self.find_repl_attid(repl, drsuapi.DRSUAPI_ATTID_isDeleted)
+
+ delete_time = samba.nttime2unix(isDeleted.originating_change_time)
+ current_time = time.time()
+
+ tombstone_delta = self.tombstoneLifetime * (24 * 60 * 60)
+
+ delta = current_time - delete_time
+ if delta <= tombstone_delta:
+ return False
+
+ self.report("SKIPING: object %s is an expired tombstone" % dn)
+ self.report("isDeleted: attid=0x%08x version=%d invocation=%s usn=%s (local=%s) at %s" % (
+ isDeleted.attid,
+ isDeleted.version,
+ isDeleted.originating_invocation_id,
+ isDeleted.originating_usn,
+ isDeleted.local_usn,
+ time.ctime(samba.nttime2unix(isDeleted.originating_change_time))))
+ self.expired_tombstones += 1
+ return True
+
+ def find_changes_after_deletion(self, repl_val):
+ repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob, repl_val)
+
+ isDeleted = self.find_repl_attid(repl, drsuapi.DRSUAPI_ATTID_isDeleted)
+
+ delete_time = samba.nttime2unix(isDeleted.originating_change_time)
+
+ tombstone_delta = self.tombstoneLifetime * (24 * 60 * 60)
+
+ found = []
+ for o in repl.ctr.array:
+ if o.attid == drsuapi.DRSUAPI_ATTID_isDeleted:
+ continue
+
+ if o.local_usn <= isDeleted.local_usn:
+ continue
+
+ if o.originating_change_time <= isDeleted.originating_change_time:
+ continue
+
+ change_time = samba.nttime2unix(o.originating_change_time)
+
+ delta = change_time - delete_time
+ if delta <= tombstone_delta:
+ continue
+
+ # If the modification happened after the tombstone lifetime
+ # has passed, we have a bug as the object might be deleted
+ # already on other DCs and won't be able to replicate
+ # back
+ found.append(o)
+
+ return found, isDeleted
+
+ def has_changes_after_deletion(self, dn, repl_val):
+ found, isDeleted = self.find_changes_after_deletion(repl_val)
+ if len(found) == 0:
+ return False
+
+ def report_attid(o):
+ try:
+ attname = self.samdb_schema.get_lDAPDisplayName_by_attid(o.attid)
+ except KeyError:
+ attname = "<unknown:0x%x08x>" % o.attid
+
+ self.report("%s: attid=0x%08x version=%d invocation=%s usn=%s (local=%s) at %s" % (
+ attname, o.attid, o.version,
+ o.originating_invocation_id,
+ o.originating_usn,
+ o.local_usn,
+ time.ctime(samba.nttime2unix(o.originating_change_time))))
+
+ self.report("ERROR: object %s, has changes after deletion" % dn)
+ report_attid(isDeleted)
+ for o in found:
+ report_attid(o)
+
+ return True
+
+ def err_changes_after_deletion(self, dn, repl_val):
+ found, isDeleted = self.find_changes_after_deletion(repl_val)
+
+ in_schema_nc = dn.is_child_of(self.schema_dn)
+ rdn_attr = dn.get_rdn_name()
+ rdn_attid = self.samdb_schema.get_attid_from_lDAPDisplayName(rdn_attr,
+ is_schema_nc=in_schema_nc)
+
+ unexpected = []
+ for o in found:
+ if o.attid == rdn_attid:
+ continue
+ if o.attid == drsuapi.DRSUAPI_ATTID_name:
+ continue
+ if o.attid == drsuapi.DRSUAPI_ATTID_lastKnownParent:
+ continue
+ try:
+ attname = self.samdb_schema.get_lDAPDisplayName_by_attid(o.attid)
+ except KeyError:
+ attname = "<unknown:0x%x08x>" % o.attid
+ unexpected.append(attname)
+
+ if len(unexpected) > 0:
+ self.report('Unexpeted attributes: %s' % ",".join(unexpected))
+ self.report('Not fixing changes after deletion bug')
+ return
+
+ if not self.confirm_all('Delete broken tombstone object %s deleted %s days ago?' % (
+ dn, self.tombstoneLifetime), 'fix_changes_after_deletion_bug'):
+ self.report('Not fixing changes after deletion bug')
+ return
+
+ if self.do_delete(dn, ["relax:0"],
+ "Failed to remove DN %s" % dn):
+ self.report("Removed DN %s" % dn)
+
def has_replmetadata_zero_invocationid(self, dn, repl_meta_data):
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
- str(repl_meta_data))
+ repl_meta_data)
ctr = repl.ctr
found = False
for o in ctr.array:
def err_replmetadata_zero_invocationid(self, dn, attr, repl_meta_data):
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
- str(repl_meta_data))
+ repl_meta_data)
ctr = repl.ctr
now = samba.unix2nttime(int(time.time()))
found = False
def err_replmetadata_unknown_attid(self, dn, attr, repl_meta_data):
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
- str(repl_meta_data))
+ repl_meta_data)
ctr = repl.ctr
for o in ctr.array:
# Search for an invalid attid
def err_replmetadata_incorrect_attid(self, dn, attr, repl_meta_data, wrong_attids):
repl = ndr_unpack(drsblobs.replPropertyMetaDataBlob,
- str(repl_meta_data))
+ repl_meta_data)
fix = False
set_att = set()
if "description" not in obj:
self.report("ERROR: description not present on Deleted Objects container %s" % obj.dn)
faulty = True
- if "showInAdvancedViewOnly" not in obj or obj['showInAdvancedViewOnly'][0].upper() == 'FALSE':
+ if "showInAdvancedViewOnly" not in obj or str(obj['showInAdvancedViewOnly'][0]).upper() == 'FALSE':
self.report("ERROR: showInAdvancedViewOnly not present on Deleted Objects container %s" % obj.dn)
faulty = True
if "objectCategory" not in obj:
self.report("ERROR: objectCategory not present on Deleted Objects container %s" % obj.dn)
faulty = True
- if "isCriticalSystemObject" not in obj or obj['isCriticalSystemObject'][0].upper() == 'FALSE':
+ if "isCriticalSystemObject" not in obj or str(obj['isCriticalSystemObject'][0]).upper() == 'FALSE':
self.report("ERROR: isCriticalSystemObject not present on Deleted Objects container %s" % obj.dn)
faulty = True
if "isRecycled" in obj:
self.report("ERROR: isRecycled present on Deleted Objects container %s" % obj.dn)
faulty = True
- if "isDeleted" in obj and obj['isDeleted'][0].upper() == 'FALSE':
+ if "isDeleted" in obj and str(obj['isDeleted'][0]).upper() == 'FALSE':
self.report("ERROR: isDeleted not set on Deleted Objects container %s" % obj.dn)
faulty = True
if "objectClass" not in obj or (len(obj['objectClass']) != 2 or
- obj['objectClass'][0] != 'top' or
- obj['objectClass'][1] != 'container'):
+ str(obj['objectClass'][0]) != 'top' or
+ str(obj['objectClass'][1]) != 'container'):
self.report("ERROR: objectClass incorrectly set on Deleted Objects container %s" % obj.dn)
faulty = True
- if "systemFlags" not in obj or obj['systemFlags'][0] != '-1946157056':
+ if "systemFlags" not in obj or str(obj['systemFlags'][0]) != '-1946157056':
self.report("ERROR: systemFlags incorrectly set on Deleted Objects container %s" % obj.dn)
faulty = True
return faulty
target = self.samdb.get_dsServiceName()
if self.samdb.am_rodc():
- self.report('Not fixing %s for the RODC' % (attr, obj.dn))
+ self.report('Not fixing %s %s for the RODC' % (attr, obj.dn))
return
if not self.confirm_all('Add yourself to the replica locations for %s?'
raise
else:
instancetype |= dsdb.INSTANCE_TYPE_NC_ABOVE
-
- if self.write_ncs is not None and str(nc_root) in self.write_ncs:
+ if self.write_ncs is not None and str(nc_root) in [str(x) for x in self.write_ncs]:
instancetype |= dsdb.INSTANCE_TYPE_WRITE
return instancetype
raise KeyError
- def check_object(self, dn, attrs=['*']):
+ def check_object(self, dn, attrs=None):
'''check one object'''
if self.verbose:
self.report("Checking object %s" % dn)
-
- # If we modify the pass-by-reference attrs variable, then we get a
- # replPropertyMetadata for every object that we check.
- attrs = list(attrs)
+ if attrs is None:
+ attrs = ['*']
+ else:
+ # make a local copy to modify
+ attrs = list(attrs)
if "dn" in map(str.lower, attrs):
attrs.append("name")
if "distinguishedname" in map(str.lower, attrs):
error_count = 0
set_attrs_from_md = set()
set_attrs_seen = set()
- got_repl_property_meta_data = False
got_objectclass = False
nc_dn = self.samdb.get_nc_root(obj.dn)
name_val = None
isDeleted = False
systemFlags = 0
+ repl_meta_data_val = None
+
+ for attrname in obj:
+ if str(attrname).lower() == 'isdeleted':
+ if str(obj[attrname][0]) != "FALSE":
+ isDeleted = True
+
+ if str(attrname).lower() == 'systemflags':
+ systemFlags = int(obj[attrname][0])
+
+ if str(attrname).lower() == 'replpropertymetadata':
+ repl_meta_data_val = obj[attrname][0]
+
+ if isDeleted and repl_meta_data_val:
+ if self.has_changes_after_deletion(dn, repl_meta_data_val):
+ error_count += 1
+ self.err_changes_after_deletion(dn, repl_meta_data_val)
+ return error_count
+ if self.is_expired_tombstone(dn, repl_meta_data_val):
+ return error_count
for attrname in obj:
if attrname == 'dn' or attrname == "distinguishedName":
self.report("ERROR: Not fixing num_values(%d) for '%s' on '%s'" %
(len(obj[attrname]), attrname, str(obj.dn)))
else:
- name_val = obj[attrname][0]
+ name_val = str(obj[attrname][0])
if str(attrname).lower() == str(obj.dn.get_rdn_name()).lower():
object_rdn_attr = attrname
self.report("ERROR: Not fixing num_values(%d) for '%s' on '%s'" %
(len(obj[attrname]), attrname, str(obj.dn)))
else:
- object_rdn_val = obj[attrname][0]
-
- if str(attrname).lower() == 'isdeleted':
- if obj[attrname][0] != "FALSE":
- isDeleted = True
-
- if str(attrname).lower() == 'systemflags':
- systemFlags = int(obj[attrname][0])
+ object_rdn_val = str(obj[attrname][0])
if str(attrname).lower() == 'replpropertymetadata':
- if self.has_replmetadata_zero_invocationid(dn, obj[attrname]):
+ if self.has_replmetadata_zero_invocationid(dn, obj[attrname][0]):
error_count += 1
- self.err_replmetadata_zero_invocationid(dn, attrname, obj[attrname])
+ self.err_replmetadata_zero_invocationid(dn, attrname, obj[attrname][0])
# We don't continue, as we may also have other fixes for this attribute
# based on what other attributes we see.
try:
(set_attrs_from_md, list_attid_from_md, wrong_attids) \
- = self.process_metadata(dn, obj[attrname])
+ = self.process_metadata(dn, obj[attrname][0])
except KeyError:
error_count += 1
self.err_replmetadata_unknown_attid(dn, attrname, obj[attrname])
or len(wrong_attids) > 0 \
or sorted(list_attid_from_md) != list_attid_from_md:
error_count += 1
- self.err_replmetadata_incorrect_attid(dn, attrname, obj[attrname], wrong_attids)
+ self.err_replmetadata_incorrect_attid(dn, attrname, obj[attrname][0], wrong_attids)
else:
# Here we check that the first attid is 0
# (objectClass).
if list_attid_from_md[0] != 0:
error_count += 1
- self.report("ERROR: Not fixing incorrect inital attributeID in '%s' on '%s', it should be objectClass" %
+ self.report("ERROR: Not fixing incorrect initial attributeID in '%s' on '%s', it should be objectClass" %
(attrname, str(dn)))
- got_repl_property_meta_data = True
continue
if str(attrname).lower() == 'ntsecuritydescriptor':
continue
current_sd = ndr_unpack(security.descriptor,
- str(obj[attrname][0]))
+ obj[attrname][0])
diff = get_diff_sds(well_known_sd, current_sd, security.dom_sid(self.samdb.get_domain_sid()))
if diff != "":
continue
if str(attrname).lower() == 'userparameters':
- if len(obj[attrname][0]) == 1 and obj[attrname][0][0] == '\x20':
+ if len(obj[attrname][0]) == 1 and obj[attrname][0][0] == b'\x20'[0]:
error_count += 1
self.err_short_userParameters(obj, attrname, obj[attrname])
continue
- elif obj[attrname][0][:16] == '\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00':
+ elif obj[attrname][0][:16] == b'\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00':
# This is the correct, normal prefix
continue
- elif obj[attrname][0][:20] == 'IAAgACAAIAAgACAAIAAg':
+ elif obj[attrname][0][:20] == b'IAAgACAAIAAgACAAIAAg':
# this is the typical prefix from a windows migration
error_count += 1
self.err_base64_userParameters(obj, attrname, obj[attrname])
continue
- elif obj[attrname][0][1] != '\x00' and obj[attrname][0][3] != '\x00' and obj[attrname][0][5] != '\x00' and obj[attrname][0][7] != '\x00' and obj[attrname][0][9] != '\x00':
+ #43:00:00:00:74:00:00:00:78
+ elif obj[attrname][0][1] != b'\x00'[0] and obj[attrname][0][3] != b'\x00'[0] and obj[attrname][0][5] != b'\x00'[0] and obj[attrname][0][7] != b'\x00'[0] and obj[attrname][0][9] != b'\x00'[0]:
# This is a prefix that is not in UTF-16 format for the space or munged dialback prefix
error_count += 1
self.err_utf8_userParameters(obj, attrname, obj[attrname])
elif len(obj[attrname][0]) % 2 != 0:
# This is a value that isn't even in length
error_count += 1
- self.err_odd_userParameters(obj, attrname, obj[attrname])
+ self.err_odd_userParameters(obj, attrname)
continue
- elif obj[attrname][0][1] == '\x00' and obj[attrname][0][2] == '\x00' and obj[attrname][0][3] == '\x00' and obj[attrname][0][4] != '\x00' and obj[attrname][0][5] == '\x00':
+ elif obj[attrname][0][1] == b'\x00'[0] and obj[attrname][0][2] == b'\x00'[0] and obj[attrname][0][3] == b'\x00'[0] and obj[attrname][0][4] != b'\x00'[0] and obj[attrname][0][5] == b'\x00'[0]:
# This is a prefix that would happen if a SAMR-written value was replicated from a Samba 4.1 server to a working server
error_count += 1
self.err_doubled_userParameters(obj, attrname, obj[attrname])
# check for empty attributes
for val in obj[attrname]:
- if val == '':
+ if val == b'':
self.err_empty_attribute(dn, attrname)
error_count += 1
continue
set_attrs_seen.add(str(attrname).lower())
if syntax_oid in [dsdb.DSDB_SYNTAX_BINARY_DN, dsdb.DSDB_SYNTAX_OR_NAME,
- dsdb.DSDB_SYNTAX_STRING_DN, ldb.SYNTAX_DN]:
+ dsdb.DSDB_SYNTAX_STRING_DN, ldb.SYNTAX_DN]:
# it's some form of DN, do specialised checking on those
error_count += self.check_dn(obj, attrname, syntax_oid)
else:
values = set()
# check for incorrectly normalised attributes
for val in obj[attrname]:
- values.add(str(val))
+ values.add(val)
normalised = self.samdb.dsdb_normalise_attributes(self.samdb_schema, attrname, [val])
if len(normalised) != 1 or normalised[0] != val:
if str(attrname).lower() == "instancetype":
calculated_instancetype = self.calculate_instancetype(dn)
- if len(obj["instanceType"]) != 1 or obj["instanceType"][0] != str(calculated_instancetype):
+ if len(obj["instanceType"]) != 1 or int(obj["instanceType"][0]) != calculated_instancetype:
error_count += 1
self.err_wrong_instancetype(obj, calculated_instancetype)
if name_val is not None:
parent_dn = None
+ controls = ["show_recycled:1", "relax:0"]
if isDeleted:
if not (systemFlags & samba.dsdb.SYSTEM_FLAG_DISALLOW_MOVE_ON_DELETE):
parent_dn = deleted_objects_dn
+ controls += ["local_oid:%s:1" % dsdb.DSDB_CONTROL_DBCHECK_FIX_LINK_DN_NAME]
if parent_dn is None:
parent_dn = obj.dn.parent()
expected_dn = ldb.Dn(self.samdb, "RDN=RDN,%s" % (parent_dn))
if expected_dn != obj.dn:
error_count += 1
- self.err_wrong_dn(obj, expected_dn, object_rdn_attr, object_rdn_val, name_val)
+ self.err_wrong_dn(obj, expected_dn, object_rdn_attr,
+ object_rdn_val, name_val, controls)
elif obj.dn.get_rdn_value() != object_rdn_val:
error_count += 1
self.report("ERROR: Not fixing %s=%r on '%s'" % (object_rdn_attr, object_rdn_val, str(obj.dn)))
show_dn = True
- if got_repl_property_meta_data:
+ if repl_meta_data_val:
if obj.dn == deleted_objects_dn:
isDeletedAttId = 131120
# It's 29/12/9999 at 23:59:59 UTC as specified in MS-ADTS 7.1.1.4.2 Deleted Objects Container
expectedTimeDo = 2650466015990000000
- originating = self.get_originating_time(obj["replPropertyMetaData"], isDeletedAttId)
+ originating = self.get_originating_time(repl_meta_data_val, isDeletedAttId)
if originating != expectedTimeDo:
if self.confirm_all("Fix isDeleted originating_change_time on '%s'" % str(dn), 'fix_time_metadata'):
nmsg = ldb.Message()
except ldb.LdbError as e11:
(enum, estr) = e11.args
if enum == ldb.ERR_NO_SUCH_OBJECT:
- self.err_missing_parent(obj)
- error_count += 1
+ if isDeleted:
+ self.report("WARNING: parent object not found for %s" % (obj.dn))
+ self.report("Not moving to LostAndFound "
+ "(tombstone garbage collection in progress?)")
+ else:
+ self.err_missing_parent(obj)
+ error_count += 1
else:
raise
found = False
for loc in msg[location]:
- if loc == self.samdb.get_dsServiceName():
+ if str(loc) == self.samdb.get_dsServiceName():
found = True
if not found:
# This DC is not in the replica locations
error_count = 0
# check that the dsServiceName is in GUID form
- if not 'dsServiceName' in obj:
+ if 'dsServiceName' not in obj:
self.report('ERROR: dsServiceName missing in @ROOTDSE')
return error_count + 1
- if not obj['dsServiceName'][0].startswith('<GUID='):
+ if not str(obj['dsServiceName'][0]).startswith('<GUID='):
self.report('ERROR: dsServiceName not in GUID form in @ROOTDSE')
error_count += 1
if not self.confirm('Change dsServiceName to GUID form?'):