reorganization, and output filename for SimpleWeb
files/FolderFlat (new executable file, 64 lines)
@@ -0,0 +1,64 @@
#!/bin/bash

function help() {
    echo "Flatten the directory structure from the current folder downwards"
    echo "Files in subfolders will be renamed / -> _"
    echo "Empty folders are removed"
    echo " -f to force action "
    echo " -n to move files as is, i.e. not include path names in new name "
    echo " -p C Replace path separator / with character C "
}

function helpexit() {
    help
    exit
}

function preview() {
    echo ""
    for f in $( find . -mindepth 2 -type f -path '*.*' -printf %P'\n' | head ); do
        [[ "$NO_PATH" = "1" ]] && {
            echo "$f => " .
        } || {
            echo "$f =>" "${f//\//$SEP}"
        }
    done
    echo "..."
}

function flat() {
    for f in $( find . -mindepth 2 -type f -path '*.*' -printf %P'\n' ); do
        [[ "$NO_PATH" = "1" ]] && {
            mv -iv "$f" .
        } || {
            mv -iv "$f" "${f//\//$SEP}"
        }
    done
    find . -depth -type d -empty -delete
}

SEP="_"
FORCE=0
while getopts fhnp: opt
do case "$opt" in
    f)
        FORCE=1
        ;;
    h)
        helpexit
        ;;
    n)
        NO_PATH=1
        ;;
    p)
        SEP=$OPTARG
        ;;
    esac
done
IFS=$'\n'
[[ "$FORCE" = "0" ]] && {
    help
    preview
    echo "Are you sure? Break with ctrl-c"
    read i
}
flat
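A minimal usage sketch for FolderFlat (not part of the committed file; it assumes the script is executable and on $PATH, and the folder name is illustrative):

    cd ~/photos
    # without -f the script prints the help and a preview, then waits at 'read' (Ctrl-C aborts) before flattening
    FolderFlat
    # flatten without the prompt, joining path components with "-" instead of "_"
    FolderFlat -f -p -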
files/FolderSplit.py (new executable file, 164 lines)
@@ -0,0 +1,164 @@
#!/usr/bin/python

import os,sys
import math,shutil,re
from random import shuffle

VERSION="0.1"

def setup_options():
    ''' Setup the command line options '''
    from argparse import ArgumentParser

    parser=ArgumentParser(description="Splits files to subfolders equally.")

    parser.add_argument("--order",'-o',type=str,action='store', dest='order',default="sequence",
                        help="Splitting method.",
                        choices=['sequence','sparse','regexp','random'])
    parser.add_argument("-m",action='store_true', dest='move',default=False,
                        help="Move entries instead of hardlink.")
    parser.add_argument("-f",action='store_true', dest='files',default=False,
                        help="Split files only, skipping folders")
    parser.add_argument("-r",'--regexp',type=str,action='store', dest='regexp',default="",
                        help="Regular expression for splitting. When set, order regexp used, -n not used.")
    parser.add_argument("-n",'-N',type=int,action='store', dest='n',
                        help="Number of subfolders to split into.")
    parser.add_argument("path",type=str,action="store",default=".",nargs="?",
                        help="Folder to split.")
    options=parser.parse_args()
    if options.n==None and options.regexp=="":
        parser.print_help()
        parser.error("Either -n or -r must be passed")
    if options.regexp!="":
        options.order="regexp"
    return options

def linktree(src, dst):
    """Recursively link a directory tree using os.link.
    Modified from shutil.copytree
    """
    names = os.listdir(src)
    os.makedirs(dst)
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.isdir(srcname):
                linktree(srcname, dstname)
            else:
                # Will raise a SpecialFileError for unsupported file types
                os.link(srcname, dstname)
        except shutil.Error, err:
            errors.extend(err.args[0])
        except EnvironmentError, why:
            errors.append((srcname, dstname, str(why)))

    if errors:
        raise shutil.Error, errors

def copyfileorfolder(basename,source,target,move):
    ''' Copies a file or folder structure under target folder '''
    if move:
        shutil.move(os.path.join(source,basename),os.path.join(target,basename))
        return
    if os.path.isfile(os.path.join(source,basename)):
        os.link(os.path.join(source,basename),os.path.join(target,basename))
        return
    if os.path.isdir(os.path.join(source,basename)):
        linktree(os.path.join(source,basename),os.path.join(target,basename))
        return
    raise RuntimeError(source+' was neither file nor folder.')

def portorder(inFiles,inFolder,outFolders,N,link):
    ''' Copy files in port order (sparse) '''
    outidx=0
    for row in inFiles:
        copyfileorfolder(row,inFolder,outFolders[outidx],link)
        outidx+=1
        if outidx+1>N:
            outidx=0

def fileorder(inFiles,inFolder,outFolders,N,link):
    ''' Copy files in input file order (sequence) '''

    bins=[int(math.floor(float(len(inFiles))/float(N)))]*int(N)
    binidx=0
    while sum(bins)<len(inFiles):
        bins[binidx]+=1
        binidx+=1
    offsets=list(offset(bins))
    offsets.insert(0,0)

    for outidx in range(N):
        for f in range(offsets[outidx], offsets[outidx]+bins[outidx]):
            copyfileorfolder(inFiles[f],inFolder,outFolders[outidx],link)

def regexorder(inFiles,inFolder,outFolders,matcher,uniqlabel,link):
    ''' Copy files by regex match '''

    for f in inFiles:
        m=matcher.search(f)
        if m:
            outidx=uniqlabel.index(m.group(1))
            copyfileorfolder(f,inFolder,outFolders[outidx],link)

def regexmatches(inFiles, opts):
    matcher=re.compile(opts.regexp)
    matches=[]
    skipped=0
    for f in inFiles:
        m=matcher.search(f)
        if m:
            matches.append(m.group(1))
        else:
            skipped+=1
    uniqlabel=sorted(set(matches))
    print("Unique matches",uniqlabel)
    print("Not matching %d files."% skipped)
    for x in uniqlabel:
        outFolders.append(os.path.join(opts.path,x))
    return (outFolders, uniqlabel, matcher)

def offset(it):
    total = 0
    for x in it:
        total += x
        yield total

def report(outFolders):
    for x in outFolders:
        n=len(os.listdir(x))
        print( os.path.basename( x )+":"+str(n) )

''' Splits a folder input in N outputs '''
options=setup_options()
outFolders=[]
method = options.order.lower().strip()
# list files, and remove hidden (.files)
inFiles=sorted(filter(lambda x: not x.startswith('.'), os.listdir(options.path)))
if options.files:
    inFiles=[ f for f in inFiles if os.path.isfile( os.path.join( options.path, f ) ) ]

if method=='regexp':
    (outFolders, uniqlabel, matcher)=regexmatches(inFiles, options)
    raw_input("correct?")
else:
    for x in range(options.n):
        outFolders.append(os.path.join(options.path,'folder'+str(x+1)))

for x in outFolders:
    if not os.path.isdir(x):
        os.mkdir(x)
if method=='random':
    shuffle(inFiles)
    portorder(inFiles,options.path,outFolders,options.n,options.move)
if method=='regexp':
    regexorder(inFiles,options.path,outFolders,matcher,uniqlabel,options.move)
if method=='sparse':
    portorder(inFiles,options.path,outFolders,options.n,options.move)
if method=='sequence':
    fileorder(inFiles,options.path,outFolders,options.n,options.move)


report(outFolders)
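A usage sketch derived from the argparse options above (not part of the committed file; the folder name and the pattern are illustrative):

    # hardlink the entries of ./photos into folder1..folder3, in listing order
    FolderSplit.py -n 3 ./photos
    # move entries into per-label folders named after the first regexp group
    FolderSplit.py -m -r '^([0-9]{4})-' ./photos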
files/diskfree-tracker (new executable file, 295 lines)
@@ -0,0 +1,295 @@
#!/usr/bin/env python

import sys,os,glob
from datetime import datetime
from datetime import timedelta
import re,signal,time
import subprocess
#,threading

VERSION=2

W= '30'
R= '31'
G= '32'
Y= '33'
B= '34'
M= '35'
C= '36'
S= '1'
E= '0'
BR= '41'
CLR = '\033[2J'
SAVE = '\033[s'
LOAD = '\033[u'
CLRLN = '\033[K'
CLRBLN = '\033[1K'
DOWN = '\033[1B'

SORTKEY = lambda key: (key[2].split('/')[-1].lower())

def setup_options():
    ''' Setup the command line options '''
    from argparse import ArgumentParser
    import argparse

    parser=ArgumentParser(description='''
Track free space on mounted filesystems and show how fast it changes.
Example: %(prog)s -n 5
Print a single report and exit with:
%(prog)s -1''',formatter_class=argparse.RawTextHelpFormatter)

    parser.add_argument("--no-colors",'--nc',action="store_false",dest="colors",default=True,
                        help="Disable colored output")
    parser.add_argument("-n",type=int,dest="delay",default=3,
                        help="Refresh delay")
    parser.add_argument("-1",action="store_true",dest="once",default=False,
                        help="Run once and exit")

    parser.add_argument("--version",action='version', version=str(VERSION))
    options=parser.parse_args()

    return options

def c(attribs):
    ''' ANSI colorizer '''
    if not options.colors:
        return ""
    return '\033['+';'.join(attribs)+'m'

def pos(y,x):
    ''' ANSI absolute position set '''
    return "\033["+str(y)+";"+str(x)+"H"


def colorize(string):
    ''' colorizes a string based on color_match '''
    if not options.colors:
        return string
    for co in color_match:
        string=color_match[co][0].sub(color_match[co][1],string)
    return string


def count_running(string, stats):
    ''' Counts the running executions '''

    spl=[i for i in " ".join(string.split()).split(' ')]
    if len(spl)!=7:
        return stats
    if spl[6] in stats['files']:
        index=stats['files'].index(spl[6])
        speed_history=stats['running'][index][1][1:]
        speed_history.append((int(spl[4])*1024-(stats['running'][index][0]))/stats['delay'])
        stats['running'][index]=(int(spl[4])*1024, # free space
                                 speed_history, # change speed
                                 spl[6], # mount point
                                 stats['running'][index][3], # free space program start
                                 spl[5], # usage in %
                                 spl[1], # mount type
                                 int(spl[2])*1024 # total space
                                 )
    else:
        stats['running'].append((int(spl[4])*1024,
                                 [int(0)]*5,
                                 spl[6],
                                 int(spl[4])*1024,
                                 spl[5],
                                 spl[1],
                                 int(spl[2])*1024
                                 ))
        stats['running'].sort(key=SORTKEY)

    stats['files']=[i[2] for i in stats['running']]
    totalfree=sum([i[0] for i in stats['running']])
    total=sum([i[6] for i in stats['running']])
    stats['totals']=[totalfree, total]
    return stats

class EndProgram( Exception ):
    ''' Nice way of exiting the program '''
    pass


def is_number(s):
    ''' Check if string is float '''
    try:
        out=float(s)
        return True
    except:
        return False

def str_short(s,stats):
    ''' shorten text to fit screen '''
    maxL=stats['size'][1] - 16
    if len(s)<maxL:
        return s
    spl=s.split('/')
    sNew=spl[0]+'/...'+'/'.join(spl[1:])[-(maxL-len(spl[0])-5):]
    return sNew


def print_stats(stats):
    ''' Prints logged errors, and the status line '''
    #sys.stdout.write(SAVE)
    e=0
    sys.stdout.write(pos(e+1,0)+c((S,C))+"= DISK FREE = "+c((E))+
                     human_time(stats['time'])+'=>'+c((S,G))+human_time()+c((E))+CLRLN)
    if (stats['running']):
        pass
    else:
        return
    sys.stdout.write(pos(e+2,0)+" TotalDiff Free Total (usage%) Diff/s (positive=more free space)"+CLRLN)
    for ex in enumerate(stats['running']):
        sys.stdout.write(pos(e+3+ex[0],0)+'('+str(ex[0]+1).rjust(2)+') '+
                         ' '.join([human_size(ex[1][0]-ex[1][3]).rjust(10),
                                   human_size(ex[1][0]).rjust(10),
                                   human_size(ex[1][6]).rjust(8),
                                   colorize_usage(ex[1][4]),
                                   (human_size(mean_speed(ex[1][1]))+"/s").rjust(10),
                                   ex[1][2],
                                   "("+ex[1][5]+")"])+
                         CLRLN)
    sys.stdout.write(pos(e+4+ex[0],0)+'Total:'+
                     ' '.join([' '.rjust(9),
                               human_size(stats['totals'][0]).rjust(10),
                               '/',
                               human_size(stats['totals'][1]).ljust(10)
                               ])+
                     CLRLN)
    for i in range(stats['size'][0]-7-len(stats['running'])):
        sys.stdout.write(pos(e+5+ex[0]+i,0)+" "+CLRLN)

    sys.stdout.write(DOWN+CLRBLN+CLRLN)
    #sys.stdout.write(LOAD)

def print_stats_once(stats):
    ''' Prints logged errors, once '''
    e=0
    sys.stdout.write(c((S,C))+"= DISK FREE = "+c((E,))+CLRLN+'\n')
    if (stats['running']):
        pass
    else:
        return
    sys.stdout.write(" Total Used Use% Free"+CLRLN+'\n')
    for ex in enumerate(stats['running']):
        sys.stdout.write(
            ' '.join([
                human_size(ex[1][6]).rjust(8),
                human_size(ex[1][6]-ex[1][0]).rjust(10),
                colorize_usage(ex[1][4]),
                human_size(ex[1][0]).rjust(10),
                ex[1][2],
                "("+ex[1][5]+")"])+
            CLRLN+'\n')
    sys.stdout.write(
        ' '.join([
            human_size(stats['totals'][1]).rjust(8),
            human_size(stats['totals'][1]-stats['totals'][0]).rjust(10), ' ',
            human_size(stats['totals'][0]).rjust(10)
            ])+
        CLRLN+'\n')


def colorize_usage(string):
    ''' colorizes the usage string '''
    # string length indicates value <10
    if len(string)<3:
        return c((S,G))+" "+string+c((E))
    # string length indicates 100%
    if len(string)==4:
        return c((S,R))+string+c((E))
    usage=int(string[0:2])
    if usage>95:
        return c((S,R))+" "+string+c((E))
    if usage<80:
        return c((S,G))+" "+string+c((E))

    return c((S,Y))+" "+string+c((E))


def mean_speed(history):
    speed=sum(history)/len(history)
    return int(speed)

def human_time(dt=False):
    if not dt:
        dt=datetime.now()
    return dt.strftime("%H:%M:%S")

def human_size(size,precision=1):
    if size==None:
        return 'nan'
    sign=""
    if size<0:
        sign="-"
        size=-size
    suffixes=['B','KB','MB','GB','TB']
    suffixIndex = 0
    defPrecision=0
    while size > 1024:
        suffixIndex += 1
        size = size/1024.0
        defPrecision=precision
    return "%s%.*f%s"%(sign,defPrecision,size,suffixes[suffixIndex])

def readinput(lf):
    try:
        line = lf.stdout.readline()
        #line=lf.readline()
        return line
    except:
        return "CleanerTimeout"

def termsize():
    rows, columns = os.popen('stty size', 'r').read().split()
    return (int(rows),int(columns))


options=setup_options()

color_match={#'line_ends':(re.compile('$'),c.END),
             'err':(re.compile('(Failed)'),c([R,S])+'\\1'+c([E])),
             'done':(re.compile('(Done)'),c([G,S])+'\\1'+c([E])),
             'percent':(re.compile('([0-9]+%)'),c([Y,S])+'\\1'+c([E])),
             }

stats={'time':datetime.now(),
       'running':[],
       'files':[],
       'totals':[],
       'size': termsize(),
       'delay': options.delay
       }

if not options.once:
    sys.stdout.write(CLR+pos(0,0)+"Launching...")
while 1:

    try:

        proc = subprocess.Popen(['df','-x','tmpfs','-x','devtmpfs','-T'],stdout=subprocess.PIPE)
        # set a 5 second timeout for the line read.
        #~ signal.signal(signal.SIGALRM, transfers.readline)
        #~ signal.alarm(5)
        stdout,stderr=proc.communicate()
        if not stdout:
            raise EndProgram

        for line in stdout.split('\n')[1:]:
            stats=count_running(line,stats)

        if options.once:
            print_stats_once(stats)
            sys.exit(0)
        print_stats(stats)
        sys.stdout.flush()
        time.sleep(options.delay)

    except (EndProgram, KeyboardInterrupt):
        sys.stdout.write(DOWN+'\n')
        sys.stdout.flush()

        sys.exit(0)
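Usage sketch (not part of the committed file; assumes the script is executable and on $PATH):

    # full-screen view of free space per mount, refreshed every 5 seconds; Ctrl-C exits
    diskfree-tracker -n 5
    # one-shot, plain-text report, e.g. for a cron mail
    diskfree-tracker -1 --no-colors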
files/file_list.py (new executable file, 368 lines)
@@ -0,0 +1,368 @@
#!/usr/bin/python
import sys
import os
import re
import sqlite3
import subprocess
import hashlib
import magic
from argparse import ArgumentParser

SQLFILE='list_of_files.sqlite'
IMGMATCH=re.compile('.*\.jpg$|.*\.jpeg$|.*\.png$',re.I)
BADDIRS=[]
MINSIZE=0
MIME=magic.open(magic.MAGIC_NONE)
#MIME=magic.open(magic.MAGIC_MIME)
MIME.load()

def setup_options():
    parser=ArgumentParser(description="Maintains the list of files sqlite file")
    parser.add_argument("-a",action="store_false",dest="add",default=True,
                        help="Do not add new files [%(default)s]")
    parser.add_argument("-c",action="store_true",dest="changed",default=False,
                        help="Modify changed files [%(default)s]")
    parser.add_argument("-d",action="store_true",dest="delete",default=False,
                        help="Delete non-existing entries [%(default)s]")
    parser.add_argument("--du",type=str,action='store',dest="diskused",default=False,
                        help="Print directory sizes. Argument is the path where directories are listed from.")
    parser.add_argument("--du-depth",type=str,action='store',dest="diskused_depth",default=1,
                        help="Depth of summarization for --du.")
    parser.add_argument("--dup",action="store_true",dest="duplicate",default=False,
                        help="Return a list of duplicate files, based on hashes. This option will flip the 'Add new files' option. [%(default)s]")
    parser.add_argument("--full",action="store_true",dest="fullfile",default=False,
                        help="Use full files to calculate md5 checksum. Defaults to first 50Mb. [%(default)s]")
    parser.add_argument("--haschanges",action="store_true",dest="haschanges",default=False,
                        help="Do not change anything, return True and exit code 1 if DB needs update. Exit code 0 if all intact.")
    parser.add_argument("--hasdeletions",action="store_true",dest="hasdeletions",default=False,
                        help="Do not change anything, return True and exit code 1 if DB needs update. Exit code 0 if all intact.")
    parser.add_argument("--hasadditions",action="store_true",dest="hasadditions",default=False,
                        help="Do not change anything, return True and exit code 1 if DB needs update. Exit code 0 if all intact.")
    parser.add_argument("-f",action="store",dest="sqlfile",default=SQLFILE,
                        help="SQL file name to use [%(default)s]")
    parser.add_argument("-l",action="store_true",dest="symlinks",default=False,
                        help="Follow symbolic links [%(default)s]")
    parser.add_argument("--match",type=str,dest="match",default=False,
                        help="Search for closest match from basenames, can be helped with adding -s")
    parser.add_argument("-s",type=str,action='append',dest="search",default=[],
                        help="Search list based on path pattern")
    parser.add_argument("-x",action="append",dest="exclude",default=[],
                        help="Exclude folder name from the lists. This option may be issued several times")
    parser.add_argument('startpath', action="store",default='.', nargs='?')

    options=parser.parse_args()
    BADDIRS.extend(options.exclude)
    if options.duplicate:
        options.add=not options.add
    options.startpath=unicode(options.startpath, "UTF-8")
    return options

def createdb(fname):
    conn=sqlite3.connect(fname)
    db=conn.cursor()
    conn.text_factory=str
    db.execute('CREATE TABLE list (id INTEGER PRIMARY KEY AUTOINCREMENT,\
               file TEXT,date INTEGER, hash TEXT,\
               size INTEGER, mime TEXT)')
    conn.commit()
    return

def delete_nonexisting(sqlfile,options):
    conn=sqlite3.connect(sqlfile)
    conn.text_factory=str
    db=conn.cursor()
    dbdel=conn.cursor()
    db.execute('SELECT file FROM list')
    for row in db:
        if os.path.exists(row[0]):
            delete=False
            if not options.symlinks:
                if os.path.islink(row[0]):
                    delete=True
        else:
            delete=True
        if delete:
            print('removing.. '+row[0])
            dbdel.execute("DELETE FROM list where file == ?",(row[0],))
    conn.commit()
    return

def disk_used(options):
    conn=sqlite3.connect(options.sqlfile)
    conn.text_factory=str
    db=conn.cursor()
    db.execute('SELECT size,replace(file,?,"") as path FROM list WHERE file LIKE ?',
               (os.path.realpath(options.diskused)+"/",
                os.path.realpath(options.diskused)+"%",
                ))
    entries=[]
    sizes=[]
    for row in db:
        start_path=row[1].split('/')
        start_path="/".join(start_path[0:int(options.diskused_depth)])
        if start_path not in entries:
            entries.append(start_path)
            sizes.append(row[0])
        else:
            sizes[ entries.index(start_path) ]+=row[0]
    for entry in zip(sizes,entries):
        print("| ".join([ str(entry[0]).ljust(14),
                          humanize_size(entry[0]).rjust(8),
                          entry[1]]))

def has_changes(options):
    conn=sqlite3.connect(options.sqlfile)
    conn.text_factory=str
    db=conn.cursor()
    if options.haschanges:
        options.changed=True
    if options.hasdeletions or options.haschanges:
        has_changes_deleted(db)
    if options.hasadditions or options.haschanges:
        has_changes_additions(db,options)

def has_changes_deleted(db):
    db.execute('SELECT file FROM list')
    for row in db:
        if not os.path.exists(row[0]):
            print('True')
            sys.exit(1)
    return

def has_changes_additions(db,options):
    for path,dirs,files in os.walk(options.startpath,followlinks=options.symlinks):
        dirs=clean_dirs(dirs)
        db_files=get_folder_contents(db,os.path.realpath(path)+'/')
        if not options.symlinks:
            files=clean_syms(files,path)
        for file in files:
            filename=os.path.realpath(os.path.join(path,file))
            if file==options.sqlfile:
                continue
            #if not is_listed(db,filename):
            if file not in db_files:
                print('True')
                sys.exit(1)
            else:
                if options.changed:
                    ftime=os.path.getmtime(filename)
                    if not ftime_match(db,filename,ftime):
                        #file content changed
                        print('True')
                        sys.exit(1)

    return

def add_recurse(options):
    conn=sqlite3.connect(options.sqlfile)
    conn.text_factory=str
    db=conn.cursor()
    for path,dirs,files in os.walk(options.startpath,followlinks=options.symlinks):
        dirs=clean_dirs(dirs)
        dirs.sort()
        files.sort()
        db_files=get_folder_contents(db,os.path.realpath(path)+'/')
        if not options.symlinks:
            files=clean_syms(files,path)
        for file in files:
            filename=os.path.realpath(os.path.join(path,file))
            if file==options.sqlfile:
                continue
            #if not is_listed(db,filename):
            if file not in db_files:
                if options.add:
                    add_single(conn,filename,change=False,fullfile=options.fullfile)
            else:
                if options.changed:
                    ftime=os.path.getmtime(filename)
                    if not ftime_match(db,filename,ftime):
                        #file content changed
                        add_single(conn,filename,change=True,fullfile=options.fullfile)
        conn.commit()

    return

def add_single(conn,filename,change=False,hash=None,minsize=0,fullfile=False):

    print "%(f)s" % {'f':filename}
    db=conn.cursor()
    try:
        if hash==None:
            hash=get_md5(filename,fullfile)
        ftime=os.path.getmtime(filename)
        fsize=os.path.getsize(filename)
        mime=MIME.file(filename.encode('UTF-8'))
    except IOError:
        print("File not found. Bad link?")
        return
    except UnicodeDecodeError:
        mime="NA"

    if change:
        db.execute("UPDATE list SET date=?, hash=?, size=?, mime=? \
                   WHERE file=?",(ftime,hash,fsize,mime,filename))
        #print "changing: %(f)s " % {'f':filename}
    else:
        db.execute("INSERT INTO list(file,date,hash,size,mime)\
                   VALUES(?,?,?,?,?)",(filename,ftime,hash,fsize,mime))
    return

def is_listed(db,filename):
    db.execute("SELECT COUNT(*) FROM list where file == ?",(filename,))
    count=db.fetchall()
    return count[0][0]>0

def get_folder_contents(db,path):
    ''' return the contents of the folder '''
    files=[]
    db.execute("SELECT file FROM list where file LIKE ?",(path+'%',))
    for row in db:
        try:
            base=row[0].decode('utf-8').replace(path,'',1)
        except UnicodeDecodeError:
            print(row[0]+" is giving me trouble.")
            try:
                base=row[0].encode('utf-8').replace(path,'',1)
            except UnicodeDecodeError:
                print(row[0]+" is still giving me trouble.")
                sys.exit(1)
        if base.find('/')==-1:
            files.append(base)
    return files

def ftime_match(db,filename,ftime):
    db.execute("SELECT date FROM list where file == ?",(filename,))
    count=db.fetchall()
    return count[0][0]==ftime

def hash_match(db,filename,hash):
    db.execute("SELECT hash FROM list where file == ?",(filename,))
    count=db.fetchall()
    return count[0][0]==hash

def humanize_size(size,precision=1):
    if size==None:
        return 'nan'
    suffixes=['B','KB','MB','GB','TB']
    suffixIndex = 0
    defPrecision=0
    while size > 1024:
        suffixIndex += 1 #increment the index of the suffix
        size = size/1024.0 #apply the division
        defPrecision=precision
    return "%.*f%s"%(defPrecision,size,suffixes[suffixIndex])

def get_md5(filename,fullfile=False):
    ''' returns content based hash, only first 50Mb is read, unless user wants the whole file '''
    if fullfile:
        block_size=2**20
        md5 = hashlib.md5()
        with open(filename,'rb') as f:
            for chunk in iter(lambda: f.read(block_size), b''):
                md5.update(chunk)
        return md5.hexdigest()
    return hashlib.md5(open(filename,'rb').read(1024*1024*50)).hexdigest()

def clean_dirs(dirs):
    for s in dirs[:]:
        if (s in BADDIRS) or (s.startswith(".")):
            dirs.remove(s)
    return dirs

def clean_syms(files,path):
    nonsyms=[]
    for f in files:
        if not os.path.islink(os.path.join(path,f)):
            nonsyms.append(f)
    return nonsyms

def find_duplicates(sqlfile):
    conn=sqlite3.connect(sqlfile)
    conn.text_factory=str
    db=conn.cursor()
    dbh=conn.cursor()
    db.execute("SELECT hash,count(*) FROM list WHERE size > 0 GROUP BY hash HAVING count(*) > 1 ")
    duphash=[]
    for row in db:
        hash=row[0]
        dbh.execute("SELECT file,size,date FROM list WHERE hash = ?",(hash,))
        flist=[]
        for row in dbh:
            flist.append(row)
        flist.sort(key=lambda file: file[0])
        duphash.append((hash, flist))
    duphash.sort(key=lambda file: file[1][0])
    return duphash

def searchdb(sqlfile,needle):
    needle=['%'+i+'%' for i in needle]
    like_query=' OR '.join(['file LIKE ?' for i in needle])
    conn=sqlite3.connect(sqlfile)
    conn.text_factory=str
    db=conn.cursor()
    db.execute("SELECT file FROM list WHERE "+like_query+" ORDER BY file",needle)
    for row in db:
        print(row[0])

def matchdb(sqlfile,needle,helper):
    needle=needle.lower()
    import difflib as dl
    conn=sqlite3.connect(sqlfile)
    conn.text_factory=str
    db=conn.cursor()
    if len(helper)>0:
        helper=['%'+i+'%' for i in helper]
        like_query=' OR '.join(['file LIKE ?' for i in helper])
        db.execute("SELECT file FROM list WHERE "+like_query+" ORDER BY date DESC",helper)
    else:
        db.execute("SELECT file FROM list ORDER BY date DESC")
    ratio=0
    best_match=""
    for row in db:
        s=dl.SequenceMatcher(None, os.path.basename(row[0]).lower(), needle)
        s_ratio=s.ratio()
        if ratio < s_ratio:
            ratio=s_ratio
            best_match=row[0]
    print(best_match)

def print_structure(files):
    for hash in files:
        #print(hash[0])
        i=1
        for f in hash[1]:
            print "%(i)d: %(x)d:%(f)s " % {'i':i, 'f':f[0], 'x':f[1]}
            i+=1
    return

def main():
    options=setup_options();

    if not os.path.exists(options.sqlfile):
        createdb(options.sqlfile);
    if options.haschanges or options.hasadditions or options.hasdeletions:
        has_changes(options)
        sys.exit(0)
    if len(options.search)>0 and not options.match:
        searchdb(options.sqlfile,options.search)
        sys.exit(0)
    if options.match:
        matchdb(options.sqlfile,options.match,options.search)
        sys.exit(0)
    if options.diskused:
        disk_used(options)
        sys.exit(0)
    if options.delete:
        print('Deleting entries...')
        delete_nonexisting(options.sqlfile,options)
    if options.add or options.changed:
        print('Adding '+options.startpath+' entries...')
        add_recurse(options)
    if options.duplicate:
        files=find_duplicates(options.sqlfile)
        print_structure(files)

    sys.exit(0)

main()
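A usage sketch based on the options above (not part of the committed file; paths are illustrative):

    # index the current tree into list_of_files.sqlite, then pick up changed files on a later run
    file_list.py
    file_list.py -c
    # report duplicates by md5, and per-directory sizes summarized two levels deep
    file_list.py --dup
    file_list.py --du . --du-depth 2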
files/fimplate.py (new executable file, 66 lines)
@@ -0,0 +1,66 @@
#!/usr/bin/python
import sys,re,os
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
def setup_options():
    parser=ArgumentParser(description="""Template Filler

=== Template example: ===
Hello [[name]]!


== Value file/Value pair example: ==
[[name]]=John
""",formatter_class=RawTextHelpFormatter)
    parser.add_argument("-e",action="store_true",dest="env",default=False,
                        help="Use the environment to replace ${env} style variables.")
    parser.add_argument("-f",action="store",dest="file",
                        help="File name to read keys/values.")
    parser.add_argument("-p",action="append",dest="values",default=[],
                        help="key=value pairs. This option may be issued several times.")
    parser.add_argument('template', action="store", nargs='?',
                        help="Template file to be filled. If not defined, stdin used.")
    options=parser.parse_args()
    return options

def parse_file(filename):
    pairs=[]
    with open(filename,"r") as reader:
        for i,l in enumerate(reader):
            l=l.rstrip("\n\r")
            if len(l)==0:
                continue
            tokens = l.split('=', 1)
            if len(tokens)!=2:
                print("File %s:%i key=value pair '%s' does not parse"%(filename,i+1,l,))
                sys.exit(1)
            pairs.append( (tokens[0], tokens[1].decode('string_escape')) )
    return pairs

def parse_arguments(args):
    pairs=[]
    for p in args:
        tokens = p.split('=', 1)
        if len(tokens)!=2:
            print("Argument key=value pair '%s' does not parse"%(p,))
            sys.exit(1)
        pairs.append( (tokens[0], tokens[1].decode('string_escape')) )
    return pairs

options=setup_options();
pairs=[]
if options.file!=None:
    pairs.extend(parse_file(options.file))
pairs.extend(parse_arguments(options.values))
if options.template==None:
    in_reader=sys.stdin
else:
    in_reader=open(options.template,'rb')
for l in in_reader:
    for p in pairs:
        l=l.replace(p[0],p[1])
    if options.env:
        var_list=[m.group(0) for m in re.finditer('\${[^ ]+}', l)]
        for v in var_list:
            l=l.replace(v,os.environ.get(v[:-1][2:],""))
    sys.stdout.write(l)
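A usage sketch (not part of the committed file; the template text, file names and key/value pairs are illustrative):

    # fill [[name]] from the command line and expand ${HOME} from the environment
    echo 'Hello [[name]], your home is ${HOME}' | fimplate.py -e -p '[[name]]=John'
    # or take both the template and the values from files
    fimplate.py -f values.txt letter.tmpl > letter.txt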
files/image_list.py (new executable file, 1082 lines; diff not shown because the file is too large)
files/image_list_tagger.py (new executable file, 208 lines)
@@ -0,0 +1,208 @@
#!/usr/bin/python
import sys,os,re
from Tkinter import *
import Image, ImageTk, math, ImageDraw
from argparse import ArgumentParser
import sqlite3

SQLFILE='list_of_images.sqlite'
IMGMATCH=re.compile('.*\.jpg$|.*\.jpeg$|.*\.png$|.*\.gif$',re.I)

def setup_options():
    parser=ArgumentParser(description="Tag images in image_list")
    parser.add_argument("-f",action="store",dest="sqlfile",default=SQLFILE,
                        help="SQL file name to use [%(default)s]")
    parser.add_argument("-t",action="store",dest="tags",default="thisIsGood,thisIsBad",
                        help="Comma separated list of tags")
    parser.add_argument("-n",action="store",type=int,dest="number",default=0,
                        help="Number of tags expected. 0 is any amount.")
    parser.add_argument('path', action="store",default='.', nargs='?')

    options=parser.parse_args()
    options.tags=[x.strip() for x in options.tags.split(",")]
    return options


class Click:
    def __init__(self, image,tags,sqlfile,title,number):

        self.title=title
        self.root=Tk()
        self.root.geometry("1024x768+220+0")
        self.root.bind("<Escape>", self._quit)
        self.root.title(self.title)

        self.numberLimit=number
        self.numberCurrent=1
        self.image = Image.open(image)
        self.image_name = image
        self.img_copy= self.image.copy()
        self.pixels = self.img_copy.load()
        self.background_image = ImageTk.PhotoImage(self.image)

        self.background = Label(self.root, image=self.background_image, cursor='cross')
        self.background.pack(fill="both", expand=True, side="left")
        self.background.bind('<Configure>', self._resize_image)

        self.top=Toplevel(self.root)
        self.top.bind("<Escape>", self._quit)
        self.tagTexts=tags
        self.tags=[]
        self.tags.append(Button(self.top,text="[z] Next", command=self._tag_button("z")))
        self.root.bind("z", self._tag_key)
        for t in range(len(self.tagTexts)):
            return_func=self._tag_button(t+1)
            if t<34:
                self.tags.append(Button(self.top,text="[%s] %s"%(self._hotkey(t+1),self.tagTexts[t]), command=return_func))
                self.root.bind(self._hotkey(t+1), self._tag_key)
                #self.top.bind(str(t), self._tag_key)
                continue
            self.tags.append(Button(self.top,text=self.tagTexts[t], command=return_func))
        for t in range(len(self.tags)):
            self.tags[t].pack(fill="both", side="top")
        # custom tag
        self.tags.append(Entry(self.top))
        self.tags[t+1].pack(fill="both", side="top")
        self.tags[t+1].bind("<Return>", self._tag_key)
        self.tags.append(Button(self.top, text="Custom", command=self._tag_button("custom")))
        self.tags[t+2].pack(fill="both", side="top")

        self.top.geometry("220x%d+0+0"%(self.root.winfo_screenheight()-150,))

        self._resize_init()

        self._init_db(sqlfile)
        self._print_tags()

        self.root.mainloop()

    def _tag_key(self,event):
        if event.keysym=="z":
            self._tag_button("z")()
            return
        if event.keysym=="Return":
            self._tag_button("custom")()
            return
        thisfunc=self._tag_button(self._yektoh(event.keysym))
        thisfunc()

    def _tag_button(self,n):
        def actual_value(x=n):
            if x=="z":
                self._next(n)
                return
            self._add_tag(x)
        return actual_value

    def _resize_image(self,event):
        new_width = event.width
        new_height = event.height
        self.image = self.img_copy.resize((new_width, new_height))
        self.background_image = ImageTk.PhotoImage(self.image)
        self.background.configure(image = self.background_image)

    def _resize_init(self):
        event = self._get_max_size()
        self.root.geometry("%dx%d+220+0"% (event.width, event.height))
        self._resize_image(event)

    def _get_max_size(self,refer_size=None):
        """ return max size for image that fits the refer_size,.
        otherwise, return max size for the screen """
        if refer_size:
            refer_width=refer_size[0]
            refer_height=refer_size[1]
        else:
            refer_width=int(float(self.root.winfo_screenwidth()-220))
            refer_height=self.root.winfo_screenheight()-150
        new_height=refer_height
        new_width= int(float(self.img_copy.size[0])/float(self.img_copy.size[1])*new_height)
        if new_width>refer_width:
            new_width=refer_width
            new_height=int(float(self.img_copy.size[1])/float(self.img_copy.size[0])*new_width)
        event = type('eventclass', (object,),
                     {'width':new_width, 'height':new_height})()
        return event

    def _quit(self,event):
        self.root.destroy()
        sys.exit(0)

    def _next(self,event):
        self.root.destroy()

    def _add_tag(self,x):
        if x=="custom":
            value=self.tags[ len(self.tags)-2 ].get().strip()
            if value not in self.tagTexts:
                self.tagTexts.append(value)
            if value=="":
                return
        else:
            value=self.tagTexts[x-1]
        self.db[0].execute("SELECT hash FROM list WHERE file = ?",( os.path.realpath( self.image_name), ))
        hashes=self.db[0].fetchall()
        if len(hashes)==0:
            print("Image %s not in database!" % (self.image_name) )
            self._next(None)
            return
        self.db[0].execute("INSERT INTO tags (hash,tag) VALUES (?,?)",( hashes[0][0],value ))
        self.db[1].commit()
        print("Added %s:%s"%(self.image_name,value))
        self.numberCurrent+=1
        if self.numberLimit>0 and self.numberLimit<self.numberCurrent:
            self._next(0)
        return

    def _print_tags(self):
        self.db[0].execute("SELECT hash FROM list WHERE file = ?",( os.path.realpath( self.image_name), ))
        hashes=self.db[0].fetchall()
        if len(hashes)==0:
            print("Image %s not in database!" % (self.image_name) )
            self._next(None)
            return
        self.db[0].execute("SELECT tag FROM tags WHERE hash = ?",( hashes[0][0], ))

        print("tags: "+",".join([x[0] for x in self.db[0]]))
        return

    def _init_db(self, sqlfile):
        if not os.path.exists(sqlfile):
            print("Cannot find SQLite file: "+sqlfile)
            sys.exit(1)
        conn=sqlite3.connect(sqlfile)
        conn.text_factory=str
        db=conn.cursor()
        self.db=[db,conn]

    def get_tags(self):
        return self.tagTexts

    def _hotkey(self,i):
        # return string number for 1-9
        # a-l if i>9
        if i<10:
            return str(i)
        a=i+87
        if a<122:
            return chr(a)
        return
    def _yektoh(self,a):
        # return integer, reverse function of _hotkey
        i=ord(a)-87
        if i<0:
            return int(a)
        return i

opt=setup_options()
if os.path.isfile(opt.path):
    imagelist=[opt.path]
else:
    imagelist=sorted([os.path.join(opt.path,f) for f in os.listdir(opt.path) if IMGMATCH.match(f)])

for i,l in enumerate(imagelist):
    (p,b)=os.path.split( os.path.abspath(l) )
    (f,p)=os.path.split( p )
    sys.stdout.write("%s/%s %d/%d "%( p,b, i+1,len(imagelist)))
    clicker = Click(l,opt.tags,opt.sqlfile,"Tagger: %s/%s %d/%d "%( p,b, i+1,len(imagelist)),opt.number)
    #print(clicker.get_tags())
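Usage sketch (not part of the committed file; it assumes the images are already listed in a database produced by image_list.py, and the folder and tag names are illustrative):

    # tag every image in ./holiday with one of the listed tags (hotkeys 1..3), one tag per image
    image_list_tagger.py -f list_of_images.sqlite -t "beach,city,people" -n 1 ./holiday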
files/mvregex (new executable file, 37 lines)
@@ -0,0 +1,37 @@
#!/bin/bash

function helpexit() {
    echo rename files in current folder replacing arg1 with arg2.
    echo '-n for match non ascii, replace with arg2'
    exit
}

IFS=$'\n'
[[ -z "$1" ]] && helpexit
[[ "$1" = "-h" ]] && helpexit

POSTPROC="sed 's, , -> ,'"
which ncsv &>/dev/null && POSTPROC="ncsv -c"
c=0
if [ "$1" = "-n" ]
then for file in $( ls | grep -P "[\x80-\xFF]" )
     do echo "$file" '->' "$( echo $file | tr -d \\n\\r | tr -c [:print:] '\000' | sed s/'\x0'/"$2"/g )"
        c=$(( $c + 1 ))
     done
     echo $c' matches. Sure?'
     read i
     for file in $( ls | grep -P "[\x80-\xFF]" )
     do mv -iv -- "$file" "$( echo $file | tr -d \\n\\r | tr -c [:print:] '\000' | sed s/'\x0'/"$2"/g )"
     done
     exit
fi
(
echo -e "Matching\tReplaced"
for file in $( ls | grep -- "$1" )
do echo -e "$file"\\t"$( echo $file | sed s/"$1"/"$2"/g )"
done ) | eval $POSTPROC | grep --color=always -E "$1|$"
echo 'Sure?'
read i
for file in $( ls | grep -- "$1" )
do mv -iv -- "$file" "$( echo $file | sed s/"$1"/"$2"/g )"
done
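Usage sketch (not part of the committed file; the patterns are illustrative):

    # preview, confirm, then replace spaces with underscores in the current folder's file names
    mvregex ' ' '_'
    # replace non-ASCII bytes in file names with the letter x
    mvregex -n x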
files/rm_bg (new executable file, 24 lines)
@@ -0,0 +1,24 @@
#!/bin/bash
set -e
function helpexit() {
    echo Delete files in background, by moving them to a temp folder first.
    echo This command is always recursive!
    exit
}

[[ -z "$1" ]] && helpexit
[[ "$1" = "-h" ]] && helpexit

tempfolders=()
for f in "$@"; do
    d=$( readlink -nf $( dirname "$f" ) )/.rm_bg.$$
    mkdir -p "$d"
    mv "$f" "$d"/
    tempfolders+=( "$d" )
done

(
for (( i=0; $i<${#tempfolders[@]}; i++ )); do
    rm -rf "${tempfolders[$i]}"
done
) &
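Usage sketch (not part of the committed file; the paths are illustrative):

    # move the targets into hidden .rm_bg.<pid> folders, then delete them in the background
    rm_bg old_build/ big.log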