more management via flees-manager
@@ -16,7 +16,7 @@ The name comes from mispronouncing "files" very badly.
 
 
 - configure shares with data/shares.json
-- generate shares with utils/create-share.py
+- generate and manage shares with utils/flees-manager.py
 - configure service with data/config.json
 - uid = user id for new files
 - workers = parallel processes (i.e. one upload reserves a process)
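In practice the README change means shares are no longer generated with a one-shot script but managed through subcommands. A rough usage sketch, assuming the subcommand names defined later in this diff (share names are made up):

    python utils/flees-manager.py list -P               # list shares, -P also shows hashed passwords
    python utils/flees-manager.py folders               # list data folders and the share using each
    python utils/flees-manager.py remove old-share      # remove a share by name
    python utils/flees-manager.py rest my-share upload  # print REST API links for a share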
 code/app.py | 14 ++++++++++++--
@@ -4,7 +4,7 @@
 import os,sys,time,stat
 import json
 from datetime import datetime
-from flask import Flask, render_template, jsonify, \
+from flask import Flask, render_template, jsonify, current_app, \
     redirect, url_for, request, g, session, send_file, send_from_directory
 from werkzeug.utils import secure_filename
 import hashlib
@@ -73,6 +73,8 @@ def upload(name = None, password = None):
     (ok,share) = get_share(name)
     if not ok:
         return share
+    if not get_or_none(share,'upload') == True:
+        return "Upload not allowed",400
     if file:
         filename = os.path.join(
             share['path'],
@@ -85,7 +87,15 @@ def upload(name = None, password = None):
             return "Overwrite forbidden", 403
         file.save(filename)
         set_rights(filename)
-        return redirect(url_for('list_view',name=name))
+        response = current_app.response_class(
+            response="File uploaded",
+            status=307,
+            mimetype="text/plain"
+        )
+        response.headers["Location"] = url_for('list_view',name=name)
+        return response
+    else:
+        return "Use the 'file' variable to upload",400
 
 @app.route('/send/<name>', methods=['GET'])
 def send(name):
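The upload route now answers with a plain-text "File uploaded" body and status 307, with the Location header still pointing at the list view, so a plain curl upload gets a visible confirmation instead of a bare redirect. A hedged example call, using the curl format printed by flees-manager.py further down and the sample public_url from config.json (share name and password hash are placeholders):

    curl -F file=@'the_file_name.ext' https://my.server.com/flees/upload/my-share/<pass_hash>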
@@ -1,11 +1,13 @@
 {
   "__comment": [
+    "public_url: used for displaying REST API URLs in the command line utility",
     "workers: number of parallel processes. single long upload reserves a process.",
     "timeout: seconds for process to last. single long upload cant take longer than this.",
     "uid: Docker runs as root, this changes owner of written files. -1 to skip chowning",
     "max_zip_size: dont allow zip downloads if folder size exceeds this many megabytes",
     "app_secret_key: used to encrypt session cookie"
   ],
+  "public_url": "https://my.server.com/flees",
   "workers": 8,
   "timeout": 3600,
   "uid": 1000,
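For orientation, a complete config.json after this change might look roughly like the following. Only the key names come from this diff (and from the data_folder / shares_file lookups in flees-manager.py below); the max_zip_size, app_secret_key, data_folder and shares_file values here are illustrative:

    {
      "public_url": "https://my.server.com/flees",
      "workers": 8,
      "timeout": 3600,
      "uid": 1000,
      "max_zip_size": 500,
      "app_secret_key": "change-me",
      "data_folder": "files",
      "shares_file": "data/shares.json"
    }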
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-import hashlib,argparse,json,sys,os
-from datetime import datetime
-from shutil import copyfile
-
-parser = argparse.ArgumentParser(description='Flees share template generator')
-
-parser.add_argument('-n','--name', action="store", dest="name", required = True)
-parser.add_argument('-p','--path', action="store", dest="path", required = True,help= "path relative to data folder")
-parser.add_argument('-P','--public', action="store_true", dest="public", default = False)
-parser.add_argument('-u','--upload', action="store_true", dest="upload", default = False)
-parser.add_argument('-o','--overwrite', action="store_false", dest="overwrite", default = True,
-    help = "Disable file overwrites")
-parser.add_argument('-d','--direct', action="store_true", dest="direct", default = False,
-    help = "Allow direct file sharing (password hash included in URL)")
-parser.add_argument('--pass-plain', action="store", dest="plain", default = False)
-parser.add_argument('--pass-hashed', action="store", dest="hashed", default = False)
-parser.add_argument('-e','--expire', action="store", dest="expire", default = False, help = "expire date in format 2018-12-24 21:00")
-
-parser.add_argument('-s','--shares', action="store", dest="shares", default = False,
-    help = "Your current shares.json file")
-parser.add_argument('-i','--insert', action="store_true", dest="insert", default = False,
-    help = "If 'shares' defined, insert new share directly in the shares.json file")
-
-opts = parser.parse_args()
-
-share = {
-    'name': opts.name,
-    'path': opts.path,
-    'public': opts.public,
-    'upload': opts.upload,
-    'overwrite': opts.overwrite,
-    'direct_links': opts.direct,
-}
-if opts.plain:
-    share.update({
-        'pass_plain': opts.plain
-    })
-if opts.hashed:
-    share.update({
-        'pass_hash': hashlib.sha1(opts.hashed).hexdigest()
-    })
-if opts.expire:
-    try:
-        date_object = datetime.strptime(opts.expire,"%Y-%m-%d %H:%M")
-    except ValueError as e:
-        print(e)
-        sys.exit(1)
-    share.update({
-        'expire': opts.expire
-    })
-
-if opts.shares:
-    if os.path.exists(opts.shares):
-        shares = json.load(open(opts.shares,'rt'))
-    else:
-        shares = []
-    shares.append(share)
-    if opts.insert:
-        print("creating backup %s"%(opts.shares+".bkp",))
-        copyfile(opts.shares,opts.shares+".bkp")
-        with open(opts.shares,'wt') as fp:
-            json.dump(shares, fp, indent = 2, sort_keys = True)
-        print("Wrote file %s"%(opts.shares,))
-    else:
-        print(json.dumps(shares, indent = 2, sort_keys = True))
-else:
-    print(json.dumps(share, indent = 2, sort_keys = True))
-
-
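The deleted one-shot generator is superseded by the add subcommand of flees-manager.py defined later in this diff; note that --pass-hashed becomes --pass-hash and -s/--shares becomes a global option that defaults to the shares_file named in config.json. A hedged before/after with made-up share details:

    # before
    python utils/create-share.py -n photos -p photos --pass-plain secret -s data/shares.json -i
    # after
    python utils/flees-manager.py add -n photos -p photos --pass-plain secret -i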
@@ -18,6 +18,23 @@ def get_or_no(key,d,no):
         return d[key]
     return no
 
+def get_direct_token(share, filename):
+    if not 'pass_hash' in share:
+        return None
+    return hashlib.sha1(
+        share['pass_hash'].encode('utf-8') + filename.encode('utf-8')
+    ).hexdigest()
+
+
+def get_root_path(opts):
+    root_folder = os.path.dirname(
+        os.path.dirname(
+            os.path.abspath(
+                opts.config
+            )
+        )
+    )
+    return root_folder
 
 def file_size_human(num):
     for x in ['B','KB','MB','GB','TB']:
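A small illustration of the token that get_direct_token() produces; the URL shape matches the /direct/... links printed by print_rest_api() further down, and the share name, password and filename here are made up:

    import hashlib
    # the share's stored pass_hash (sha1 of the plain password)
    pass_hash = hashlib.sha1("secret".encode('utf-8')).hexdigest()
    # per-file token: sha1(pass_hash + filename), as in get_direct_token() above
    token = hashlib.sha1(pass_hash.encode('utf-8') + "report.pdf".encode('utf-8')).hexdigest()
    print("https://my.server.com/flees/direct/my-share/%s/report.pdf" % token)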
@@ -34,6 +51,8 @@ def list_shares(shares,opts):
         public = get_or_no('public',share, False)
         password = 'pass_hash' in share or 'pass_plain' in share
         if opts.show_password:
+            if not password:
+                password = ""
             if 'pass_plain' in share:
                 password = hashlib.sha1(share['pass_plain'].encode('utf-8')).hexdigest()
             if 'pass_hash' in share:
@@ -59,16 +78,19 @@ def list_folders(shares,config):
     if 'data_folder' not in config:
         print("data_folder not defined in config")
         sys.exit(1)
-    folders = sorted(os.listdir(config['data_folder']))
+    data_folder = os.path.join(config['__root_path__'], config['data_folder'])
+    folders = sorted(os.listdir(data_folder))
     table = []
     table.append( ('Path','Share','Size','Unit') )
     for folder in folders:
-        full_path = os.path.join(config['data_folder'], folder)
+        full_path = os.path.join(data_folder, folder)
         if not os.path.isdir(full_path):
             continue
         share_name = "[unused by any share]"
         for share in shares:
-            share_path = os.path.join(config['data_folder'], share['path'])
+            share_path = os.path.join(data_folder, share['path'])
+            if not os.path.exists(share_path):
+                break
             if os.path.samefile(full_path, share_path):
                 share_name = share['name']
                 break
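Since get_root_path() returns the directory two levels above config.json, data_folder (and the other paths in config.json) are now resolved against the repository root instead of the current working directory. A rough illustration with made-up paths:

    # config at /srv/flees/data/config.json  ->  __root_path__ == /srv/flees
    # "data_folder": "files"                 ->  folders are listed from /srv/flees/files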
@@ -83,6 +105,52 @@ def list_folders(shares,config):
         ))
     print(tabulate(table, headers = "firstrow"))
 
+def add_share(shares, config, opts):
+
+    share = {
+        'name': opts.name,
+        'path': opts.path,
+        'public': opts.public,
+        'upload': opts.upload,
+        'overwrite': opts.overwrite,
+        'direct_links': opts.direct,
+    }
+    if opts.plain:
+        share.update({
+            'pass_plain': opts.plain
+        })
+    if opts.hashed:
+        share.update({
+            'pass_hash': hashlib.sha1(opts.hashed).hexdigest()
+        })
+    if opts.expire:
+        try:
+            date_object = datetime.strptime(opts.expire,"%Y-%m-%d %H:%M")
+        except ValueError as e:
+            print(e)
+            sys.exit(1)
+        share.update({
+            'expire': opts.expire
+        })
+
+    if opts.insert:
+        shares.append(share)
+        shares_file = os.path.join(config['__root_path__'], opts.shares_file)
+        if os.path.exists(shares_file):
+            print("creating backup %s"%(shares_file+".bkp",))
+            copyfile(
+                shares_file,
+                shares_file+".bkp"
+            )
+        with open(shares_file,'wt') as fp:
+            json.dump(shares, fp, indent = 2, sort_keys = True)
+        print("Wrote file %s"%(shares_file,))
+        print("Add share: %s"%( opts.name, ))
+    else:
+        print("Share not saved anywhere. Save with -i")
+        print(json.dumps(share, indent = 2, sort_keys = True))
+
+
 def remove_share(shares,config,opts):
     name = opts.name
     share = [share for share in shares if share['name'] == name]
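Without -i the new add subcommand only prints the share it would create; a hedged example run and its output, assuming data/config.json exists and using made-up values (the JSON matches the keys set in add_share() above, sorted as json.dumps does):

    $ python utils/flees-manager.py add -n photos -p photos -u --pass-plain secret
    Share not saved anywhere. Save with -i
    {
      "direct_links": false,
      "name": "photos",
      "overwrite": true,
      "pass_plain": "secret",
      "path": "photos",
      "public": false,
      "upload": true
    }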
@@ -92,56 +160,189 @@ def remove_share(shares,config,opts):
 
     shares = [share for share in shares if share['name'] != name]
 
-    share_file = config['shares_file']
-    print("creating backup %s"%(share_file+".bkp",))
-    copyfile(share_file, share_file+".bkp")
-    with open(share_file,'wt') as fp:
+    shares_file = os.path.join(config['__root_path__'], opts.shares_file)
+    print("creating backup %s"%(shares_file+".bkp",))
+    copyfile(shares_file, shares_file+".bkp")
+    with open(shares_file,'wt') as fp:
         json.dump(shares, fp, indent = 2, sort_keys = True)
-    print("Removed %s from %s"%(name, share_file))
+    print("Removed %s from %s"%(name, shares_file))
 
 
-parser = argparse.ArgumentParser(description='Flees share manager')
-parser.add_argument('-c','--config', action="store", dest="config", default = "data/config.json",
-    help = "Your current config.json file")
-parser.add_argument('-s','--shares', action="store", dest="shares_file", default = None,
-    help = "shares.json you want to use. Defaults to what config.json defines")
-
-subparsers = parser.add_subparsers(help='sub-command help', dest='subparser_name')
-
-parser_list = subparsers.add_parser('list', help = "List shares")
-parser_list.add_argument('-P', action="store_true", dest="show_password", default = False,
-    help = "Display hashed passwords")
-
-parser_folders = subparsers.add_parser('folders', help = "List folders and share names")
-parser_remove = subparsers.add_parser('remove', help = "Remove a share")
-parser_remove.add_argument(dest="name")
-
-opts = parser.parse_args()
-config = {}
-if os.path.exists(opts.config):
-    config = json.load(open(opts.config,'rt'))
-else:
-    if opts.shares == None:
+def print_rest_api(shares, config, opts):
+    if 'public_url' not in config:
+        print("Set public_url variable in your config.json")
+        sys.exit(1)
+
+    shares = [share for share in shares if share['name'] == opts.name]
+    if len(shares) == 0:
+        print("No such share %s"%( opts.name, ))
+        sys.exit(1)
+    share = shares[0]
+
+    if opts.type == "list":
+        print("Link to list contents of the share:")
+        print("%s/list/%s"%(
+            config['public_url'],
+            share['name']
+        ))
+        return
+    if not 'pass_hash' in share:
+        print("REST API enabled only if pass_hash is set for share")
+        sys.exit(1)
+    if opts.type == "login":
+        print("Link to automatically login in the share:")
+        print("%s/list/%s/%s"%(
+            config['public_url'],
+            share['name'],
+            share['pass_hash']
+        ))
+    elif opts.type == "upload":
+        if 'upload' not in share or not share['upload']:
+            print("Uploading not allowed to this share")
+            sys.exit(0)
+
+        print("Link to upload file to the share:")
+        print("curl -F file=@'the_file_name.ext' %s/upload/%s/%s"%(
+            config['public_url'],
+            share['name'],
+            share['pass_hash']
+        ))
+    elif opts.type == "download":
+        print("Links to download files:")
+        share_path = os.path.join(
+            config['__root_path__'],
+            config['data_folder'],
+            share['path']
+        )
+        if not os.path.exists(share_path):
+            print("no files")
+            sys.exit(0)
+        for filename in sorted(os.listdir(share_path)):
+            print("%s/download/%s/%s/%s"%(
+                config['public_url'],
+                share['name'],
+                share['pass_hash'],
+                filename
+            ))
+    elif opts.type == "direct":
+        if 'direct_links' not in share or not share['direct_links']:
+            print("Direct downloading not allowed in this share")
+            sys.exit(0)
+        print("Links to direct download files:")
+        share_path = os.path.join(
+            config['__root_path__'],
+            config['data_folder'],
+            share['path']
+        )
+        if not os.path.exists(share_path):
+            print("no files")
+            sys.exit(0)
+        for filename in sorted(os.listdir(share_path)):
+            print("%s/direct/%s/%s/%s"%(
+                config['public_url'],
+                share['name'],
+                get_direct_token(share,filename),
+                filename
+            ))
+    elif opts.type == "zip":
+        print("ZIP download:")
+        print("%s/zip/%s/%s"%(
+            config['public_url'],
+            share['name'],
+            share['pass_hash']
+        ))
+
+
+def parse_options():
+    config_default = os.path.realpath(
+        os.path.join(
+            os.path.dirname(
+                os.path.realpath(__file__)
+            ),
+            "..",
+            "data",
+            "config.json"
+        )
+    )
+
+    parser = argparse.ArgumentParser(description='Flees share manager')
+    parser.add_argument('-c','--config', action="store", dest="config", default = config_default,
+        help = "Your current config.json file [%(default)s]")
+    parser.add_argument('-s','--shares', action="store", dest="shares_file", default = None,
+        help = "shares.json you want to use. Defaults to what config.json defines")
+
+    subparsers = parser.add_subparsers(help='sub-command help', dest='subparser_name')
+    ## list shares
+    parser_list = subparsers.add_parser('list', help = "List shares")
+    parser_list.add_argument('-P', action="store_true", dest="show_password", default = False,
+        help = "Display hashed passwords")
+    ## list folders
+    parser_folders = subparsers.add_parser('folders', help = "List folders and share names")
+    ## Remove
+    parser_remove = subparsers.add_parser('remove', help = "Remove a share")
+    parser_remove.add_argument(dest="name")
+    ## Add
+    parser_add = subparsers.add_parser('add', help = "Add a share")
+    parser_add.add_argument('-n','--name', action="store", dest="name", required = True)
+    parser_add.add_argument('-p','--path', action="store", dest="path", required = True,
+        help= "path relative to data folder"
+    )
+    parser_add.add_argument('-P','--public', action="store_true", dest="public", default = False)
+    parser_add.add_argument('-u','--upload', action="store_true", dest="upload", default = False)
+    parser_add.add_argument('-o','--overwrite', action="store_false", dest="overwrite", default = True,
+        help = "Disable file overwrites")
+    parser_add.add_argument('-d','--direct', action="store_true", dest="direct", default = False,
+        help = "Allow direct file sharing (password hash included in URL)")
+    parser_add.add_argument('--pass-plain', action="store", dest="plain", default = False)
+    parser_add.add_argument('--pass-hash', action="store", dest="hashed", default = False)
+    parser_add.add_argument('-e','--expire', action="store", dest="expire", default = False,
+        help = "expire date in format '%Y-%m-%d %H:%M' ex. '2018-12-24 21:00'"
+    )
+    parser_add.add_argument('-i','--insert', action="store_true", dest="insert", default = False,
+        help = "Insert new share directly in the shares.json file"
+    )
+    ## REST
+    parser_rest = subparsers.add_parser('rest', help = "Display REST API links")
+    parser_rest.add_argument(dest="name", help = "Name of the share")
+    parser_rest.add_argument(dest="type", help = "Type of command",
+        choices = ['list','login','upload','download','direct','zip']
+    )
+
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+    opts = parse_options()
+    config = {}
+    if os.path.exists(opts.config):
+        config = json.load(open(opts.config,'rt'))
+        config['__root_path__'] = get_root_path(opts)
+    else:
         print("config file %s does not exist!"%(opts.config,))
         sys.exit(1)
 
     if opts.shares_file:
         config['shares_file'] = opts.shares_file
+    if 'shares_file' in config:
+        # if not from command line, read from config
+        opts.shares_file = config['shares_file']
 
-    if os.path.exists(config['shares_file']):
-        shares = json.load(open(config['shares_file'],'rt'))
+    if os.path.exists(os.path.join(config['__root_path__'],config['shares_file'])):
+        shares = json.load(open(os.path.join(config['__root_path__'],config['shares_file']),'rt'))
     else:
-        print("shares_file %s does not exist!"%(config['shares_file']))
-        sys.exit(1)
+        print("shares_file %s does not exist!"%(os.path.join(config['__root_path__'],config['shares_file'])))
+        shares = []
 
     if opts.subparser_name == 'list':
         list_shares(shares,opts)
     elif opts.subparser_name == 'folders':
         list_folders(shares,config)
     elif opts.subparser_name == 'remove':
         remove_share(shares,config,opts)
+    elif opts.subparser_name == 'add':
+        add_share(shares,config,opts)
+    elif opts.subparser_name == 'rest':
+        print_rest_api(shares,config,opts)
 
 
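Once public_url is set in config.json, the new rest subcommand prints ready-to-paste links for a share; a hedged example (share name and hash are placeholders):

    $ python utils/flees-manager.py rest photos login
    Link to automatically login in the share:
    https://my.server.com/flees/list/photos/<pass_hash>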