added feature to limit duplicate search

q
2012-12-01 19:41:35 +02:00
parent 9ab77f59a3
commit 480e4de1f1


@@ -233,12 +233,16 @@ def confirm(prompt=None, resp=False):
         if ans == 'n' or ans == 'N':
             return False
-def find_duplicates(sqlfile):
+def find_duplicates(sqlfile,search):
+    if (search=='.'):
+        search='%'
+    else:
+        search='%'+search+'%'
     conn=sqlite3.connect(sqlfile)
     conn.text_factory=str
     db=conn.cursor()
     dbh=conn.cursor()
-    db.execute("SELECT hash,count(*) FROM list group by hash HAVING count(*) > 1 ")
+    db.execute("SELECT hash,count(*) FROM list WHERE file LIKE ? group by hash HAVING count(*) > 1 ",(search,))
     duphash=[]
     for row in db:
         hash=row[0]
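
For readability, here is how the changed part of find_duplicates reads after this commit, as a minimal sketch assembled from the visible hunk. The list(hash, file) schema is taken from the query itself; the return value and the role of the second cursor dbh are assumptions, since the rest of the function is cut off above.

import sqlite3

def find_duplicates(sqlfile, search):
    # '.' means "no limit": a bare SQL wildcard matches every stored path,
    # anything else becomes a substring match on the file column.
    if search == '.':
        search = '%'
    else:
        search = '%' + search + '%'
    conn = sqlite3.connect(sqlfile)
    conn.text_factory = str
    db = conn.cursor()
    dbh = conn.cursor()  # second cursor, used further down in the real function
    # Duplicates are now counted only among rows whose path matches the pattern.
    db.execute(
        "SELECT hash,count(*) FROM list WHERE file LIKE ? "
        "GROUP BY hash HAVING count(*) > 1",
        (search,),
    )
    duphash = [row[0] for row in db]
    return duphash  # assumed return value; the original continues past this point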
@@ -300,7 +304,7 @@ def main():
         print('Random lists...')
         random_lists(options.sqlfile)
     if options.duplicate:
-        files=find_duplicates(options.sqlfile)
+        files=find_duplicates(options.sqlfile,options.startpath)
         print_dup_structure(files)
     if options.searchsmall:
         files=find_smalls(options.minsize,options.sqlfile)
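
Below is a small, self-contained demonstration of what the new filter does, using an in-memory database with the same list(hash, file) layout implied by the query; the table contents and search strings are made up for illustration.

import sqlite3

conn = sqlite3.connect(':memory:')
db = conn.cursor()
db.execute("CREATE TABLE list (hash TEXT, file TEXT)")
db.executemany("INSERT INTO list VALUES (?, ?)", [
    ('aaa', '/photos/a.jpg'),
    ('aaa', '/photos/b.jpg'),
    ('bbb', '/music/x.mp3'),
    ('bbb', '/music/y.mp3'),
])

for search in ('.', 'photos'):
    # Same wildcard conversion as in the commit above.
    pattern = '%' if search == '.' else '%' + search + '%'
    db.execute(
        "SELECT hash,count(*) FROM list WHERE file LIKE ? "
        "GROUP BY hash HAVING count(*) > 1",
        (pattern,),
    )
    print(search, db.fetchall())

# '.'      -> both duplicate groups are reported
# 'photos' -> only the group whose paths contain 'photos'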