split utils, added manual tests

@@ -11,7 +11,8 @@ RUN pip3 install -r /requirements.txt
 COPY static /code/static
 COPY templates /code/templates
 COPY utils/__init__.py /code/utils/__init__.py
-COPY utils/utils.py /code/utils/utils.py
+COPY utils/files.py /code/utils/files.py
+COPY utils/misc.py /code/utils/misc.py
 COPY utils/crypt.py /code/utils/crypt.py
 COPY revprox.py /code/revprox.py
 COPY notifier.py /code/notifier.py

37  code/app.py
@@ -10,8 +10,7 @@ from werkzeug.utils import secure_filename
 import zipfile
 from multiprocessing import Process
 from revprox import ReverseProxied
-from utils.utils import *
-from utils.crypt import *
+from utils import *
 
 
 __FLEES_VERSION__ = "20181124.0"
@@ -162,9 +161,10 @@ def upload_join_splitted(name, token):
         return "Invalid partial filename %s -> %s\n"%( request.form['filename'], filename), 400
     if not len(parts) == part_existed + 1:
         return "Parts missing\n", 400
+    secure_name = secure_filename(request.form['filename'])
     target_name = os.path.join(
         share['path'],
-        request.form['filename']
+        secure_name
     )
     if get_or_none('overwrite', share) == False:
         if os.path.exists(target_name):
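
A quick sketch, not part of the commit, of what the new secure_name step buys: werkzeug's secure_filename flattens the client-supplied name into a single safe path component before it is joined onto share['path'], and it is the same mapping the manual tests below rely on ("big file(1).ext" becomes "big_file1.ext").

    # Illustration only; assumes werkzeug is installed (app.py already imports it).
    from werkzeug.utils import secure_filename

    print(secure_filename("big file(1).ext"))   # big_file1.ext  -- the mapping test/run-tests.sh expects
    print(secure_filename("../../etc/passwd"))  # etc_passwd     -- directory traversal stripped
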
@@ -174,7 +174,7 @@ def upload_join_splitted(name, token):
         begin = uploadJoiner(target_name, parts)
     except:
         return "Joining failed\n", 400
-    download_url = get_download_url(share, request.form['filename'], token)
+    download_url = get_download_url(share, secure_name, token)
     return "Joining started\n%s\n"%( download_url, ), 200
 
 
@@ -365,15 +365,7 @@ def file_direct(name, token, filename):
     allow_direct = get_or_none('direct_links', share) if get_or_none('pass_hash', share) else False
     if not allow_direct:
         return "-1", 403
-    token = get_direct_token(share, filename)
-    # url_for returns extra level of /../path if proxying with nginx
-    return "/".join((
-        app.config['PUBLIC_URL'],
-        'direct',
-        name,
-        token,
-        path2url(filename)
-    )), 200
+    return get_download_url(share, filename, token), 200
 
 
 @app.route('/file/ls/<name>/<token>', methods=['GET'])
@@ -936,25 +928,6 @@ def zip_clean():
         os.remove(os.path.join(app.config['ZIP_FOLDER'],file))
 
 
-def get_download_url(share, file, token):
-    direct = get_or_none('direct_links', share, False)
-    if direct:
-        return "/".join((
-            app.config['PUBLIC_URL'],
-            'direct',
-            share['name'],
-            get_direct_token(share, file),
-            path2url(file)
-        ))
-    else:
-        return "/".join((
-            app.config['PUBLIC_URL'],
-            'download',
-            share['name'],
-            token,
-            path2url(file)
-        ))
-
 
 if __name__ == "__main__":
     zip_clean()

@@ -3,8 +3,7 @@ import argparse,json,sys,os
 from shutil import copyfile
 from tabulate import tabulate
 from datetime import datetime
-from utils.utils import *
-from utils.crypt import *
+from utils import *
 
 
 def get_root_path(opts):
@@ -620,7 +619,7 @@ def print_rest_api_upload(config, share, token):
         token
     ))
     print("\nLink to upload multiple files to the share, splitting large files:")
-    print("\n# curl -s %s/script/upload_split/%s/%s | python - [-s split_size_in_Mb] file_to_upload.ext [second.file.ext]"%(
+    print("\n# python2 <( curl -s %s/script/upload_split/%s/%s ) [-s split_size_in_Mb] file_to_upload.ext [second.file.ext]"%(
         config['public_url'],
         share['name'],
         token

@@ -1 +1,3 @@
+from utils.misc import *
+from utils.files import *
+from utils.crypt import *
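
This hunk looks like the re-export shim for the split package (presumably utils/__init__.py): star-importing it keeps the old call sites working while the helpers now live in utils/misc.py, utils/files.py and utils/crypt.py. A minimal usage sketch, not part of the commit:

    # Illustration only; assumes the code/ directory is on sys.path so the
    # utils package resolves, as it does for app.py and flees-manager.py.
    from utils import *          # re-exports utils.misc, utils.files and utils.crypt

    print(safe_name("My Share!"))            # My_Share_   (now defined in utils/misc.py)
    print(is_path_safe("docs/readme.txt"))   # True        (also utils/misc.py)
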

@@ -5,21 +5,14 @@ import requests
 import re
 import json
 import stat
+from .misc import *
+from .crypt import *
+
 try:
     import magic
 except ImportError:
     pass
-try:
-    from werkzeug.utils import secure_filename
-except ImportError:
-    pass
-try:
-    from urllib.request import pathname2url
-    from urllib.request import urlparse
-except ImportError:
-    from urllib import pathname2url
-    from urlparse import urlparse
 
 
 class Logger:
     def __init__(self, filename, uid = 0, gid = 0):
@@ -71,15 +64,6 @@ class Logger:
             fp.flush()
 
 
-def bool_short(var):
-    if type(var) == bool:
-        if var:
-            return "Y"
-        else:
-            return "N"
-    return var
-
-
 def download_url(url, filename):
     try:
         r = requests.get(url, stream=True)
@@ -132,12 +116,6 @@ def file_age(path):
     )
 
 
-def file_date_human(num):
-    return datetime.fromtimestamp(
-        num
-    ).strftime(app.config['DATE_FORMAT'])
-
-
 def file_name_version(full_path):
     """ New name versioned with date of the file """
     file_dir = os.path.dirname(full_path)
@@ -188,20 +166,6 @@ def file_stat(path, filename):
     }
 
 
-def file_size_human(num, HTML = True):
-    space = ' ' if HTML else ' '
-    for x in [space + 'B', 'KB', 'MB', 'GB', 'TB']:
-        if num < 1024.0:
-            if x == space + 'B':
-                return "%d%s%s" % (num, space, x)
-            return "%3.1f%s%s" % (num, space, x)
-        num /= 1024.0
-
-
-def file_size_MB(num):
-    return "{:,.2f}".format(num/(1024*1024))
-
-
 def get_folder_size(path):
     total_size = 0
     for dirpath, dirnames, filenames in os.walk(path):
@@ -211,11 +175,24 @@ def get_folder_size(path):
     return total_size
 
 
-def get_or_none(key,d,none = None):
-    if key in d:
-        return d[key]
+def get_download_url(share, file, token):
+    direct = get_or_none('direct_links', share, False)
+    if direct:
+        return "/".join((
+            app.config['PUBLIC_URL'],
+            'direct',
+            share['name'],
+            get_direct_token(share, file),
+            path2url(file)
+        ))
     else:
-        return none
+        return "/".join((
+            app.config['PUBLIC_URL'],
+            'download',
+            share['name'],
+            token,
+            path2url(file)
+        ))
 
 
 def get_script_url(public_url, share, end_point, token = "[TOKEN]"):
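
The helper that moved here from app.py builds one of two URL shapes depending on the share's direct_links flag. A sketch of the non-direct shape, not part of the commit; the base URL, share name and token are the test values used elsewhere in this commit, and with direct links enabled the path segment becomes 'direct' and the token comes from get_direct_token:

    # Illustration only: mirrors the 'download' branch of get_download_url (POSIX quoting).
    from urllib.request import pathname2url

    public_url, share_name, token = "http://localhost:8080", "test", "testToken"
    print("/".join((public_url, 'download', share_name, token, pathname2url("big file(1).ext"))))
    # -> http://localhost:8080/download/test/testToken/big%20file%281%29.ext
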
@@ -244,22 +221,6 @@ def get_script_url(public_url, share, end_point, token = "[TOKEN]"):
     return {'cmd': cmd, 'doc': doc}
 
 
-def is_path_safe(path):
-    if path.startswith("."):
-        return False
-    if "/." in path:
-        return False
-    return True
-
-
-def is_valid_url(url, qualifying = None):
-    min_attributes = ('scheme', 'netloc')
-    qualifying = min_attributes if qualifying is None else qualifying
-    token = urlparse(url)
-    return all([getattr(token, qualifying_attr)
-                for qualifying_attr in qualifying])
-
-
 def iter_folder_files(path, recursive = True, version_folder = None):
     if recursive:
         for dirpath, dirnames, filenames in os.walk(path, topdown = False):
@@ -287,10 +248,6 @@ def iter_folder_files(path, recursive = True, version_folder = None):
             yield fp
 
 
-def path2url(path):
-    return pathname2url(path)
-
-
 def read_config(app):
     # Read config from json
     config_values = json.load(open(os.getenv('FLEES_CONFIG'),'rt'))
@@ -318,22 +275,6 @@ def read_config(app):
     return config_values
 
 
-def safe_name(s):
-    return safe_string(s, "-_")
-
-
-def safe_path(s):
-    return safe_string(s, "-_/")
-
-
-def safe_string(s, valid, no_repeat = False):
-    """ return a safe string, replace non alnum characters with _ . all characters in valid are considered valid. """
-    safe = "".join([c if c.isalnum() or c in valid else "_" for c in s])
-    if no_repeat:
-        safe = re.sub(r'_+', '_', safe)
-    return safe
-
-
 def set_rights(path):
     os.chown(path, app.config['UID'], app.config['GID'])
     st = os.stat(path)

88  code/utils/misc.py  Normal file
@@ -0,0 +1,88 @@
+from datetime import datetime
+from flask import current_app as app
+
+try:
+    from urllib.request import pathname2url
+    from urllib.request import urlparse
+except ImportError:
+    from urllib import pathname2url
+    from urlparse import urlparse
+
+try:
+    from werkzeug.utils import secure_filename
+except ImportError:
+    pass
+
+
+# String handling etc
+
+def bool_short(var):
+    if type(var) == bool:
+        if var:
+            return "Y"
+        else:
+            return "N"
+    return var
+
+
+def file_date_human(num):
+    return datetime.fromtimestamp(
+        num
+    ).strftime(app.config['DATE_FORMAT'])
+
+
+def file_size_human(num, HTML = True):
+    space = ' ' if HTML else ' '
+    for x in [space + 'B', 'KB', 'MB', 'GB', 'TB']:
+        if num < 1024.0:
+            if x == space + 'B':
+                return "%d%s%s" % (num, space, x)
+            return "%3.1f%s%s" % (num, space, x)
+        num /= 1024.0
+
+
+def file_size_MB(num):
+    return "{:,.2f}".format(num/(1024*1024))
+
+
+def get_or_none(key,d,none = None):
+    if key in d:
+        return d[key]
+    else:
+        return none
+
+
+def is_path_safe(path):
+    if path.startswith("."):
+        return False
+    if "/." in path:
+        return False
+    return True
+
+
+def is_valid_url(url, qualifying = None):
+    min_attributes = ('scheme', 'netloc')
+    qualifying = min_attributes if qualifying is None else qualifying
+    token = urlparse(url)
+    return all([getattr(token, qualifying_attr)
+                for qualifying_attr in qualifying])
+
+
+def path2url(path):
+    return pathname2url(path)
+
+
+def safe_name(s):
+    return safe_string(s, "-_")
+
+
+def safe_path(s):
+    return safe_string(s, "-_/")
+
+
+def safe_string(s, valid, no_repeat = False):
+    """ return a safe string, replace non alnum characters with _ . all characters in valid are considered valid. """
+    safe = "".join([c if c.isalnum() or c in valid else "_" for c in s])
+    if no_repeat:
+        safe = re.sub(r'_+', '_', safe)
+    return safe
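
A few of the helpers that moved into utils/misc.py in use, as a sketch; not part of the commit. Importing the module needs Flask on the path (it binds current_app at import time), but none of these calls need an app context:

    # Illustration only: expected return values of the relocated helpers.
    from utils.misc import bool_short, file_size_human, get_or_none, is_path_safe

    print(bool_short(True))                      # Y
    print(file_size_human(1536, HTML = False))   # 1.5 KB
    print(get_or_none('overwrite', {}, False))   # False  -- missing key falls back to the default
    print(is_path_safe("../secret"))             # False  -- leading dot rejected
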

7  test/run-server.sh  Executable file
@@ -0,0 +1,7 @@
+#!/bin/bash
+cd ..
+docker-compose down &
+docker-compose build
+wait
+docker-compose up -d
+docker-compose logs --follow

137  test/run-tests.sh  Executable file
@@ -0,0 +1,137 @@
+#!/bin/bash
+
+_qCol() {
+    # print "easier" mapping of ANSI colors and controls
+    local K="\033[1;30m"
+    local R="\033[1;31m"
+    local G="\033[1;32m"
+    local B="\033[1;34m"
+    local Y="\033[1;33m"
+    local M="\033[1;35m"
+    local C="\033[1;36m"
+    local W="\033[1;37m"
+
+    local k="\033[2;30m"
+    local r="\033[2;31m"
+    local g="\033[2;32m"
+    local b="\033[2;34m"
+    local y="\033[2;33m"
+    local m="\033[2;35m"
+    local c="\033[2;36m"
+    local w="\033[2;37m"
+
+    local bk="\033[40m"
+    local br="\033[41m"
+    local bg="\033[42m"
+    local by="\033[43m"
+    local bb="\033[44m"
+    local bm="\033[45m"
+    local bc="\033[46m"
+    local bw="\033[47m"
+
+    local S='\033[1m' #strong
+    local s='\033[2m' #strong off
+    local U='\033[4m' #underline
+    local u='\033[24m' #underline off
+    local z='\033[0m' #zero colors
+    local Z='\033[0m' #zero colors
+    local ic='\033[7m' #inverse colors
+    local io='\033[27m' #inverse off
+    local st='\033[9m' #strike on
+    local so='\033[29m' #strike off
+    local CLR='\033[2J' # Clear screen
+    local CLREND='\033[K' # Clear to end of line
+    local CLRBEG='\033[1K' # Clear to beginning of line
+    local CLRSCR="$CLR"'\033[0;0H' # Clear screen, reset cursor
+
+    local color_keys=" K R G B Y M C W k r g b y m c w S s U u z Z ic io st so bk br bg by bb bm bc bw CLR CLREND CLRBEG CLRSCR "
+
+    [[ "$1" = "export" ]] && {
+        local key
+        local prefix="$2"
+        [[ -z "$2" ]] && prefix=_c
+        for key in $color_keys; do
+            eval export ${prefix}${key}=\'${!key}\'
+        done
+        return
+    }
+
+    local arg val
+    for ((arg=1;arg<=$#;arg++)) {
+        val=${!arg}
+        [[ ${color_keys} = *" $val "* ]] || { echo "No such color code '${val}'" >&2; return 1; }
+        printf ${!val}
+    }
+}
+
+
+cont() {
+    set +x
+    _qCol G
+    echo Continue
+    _qCol z
+    read continue
+    _qCol Y
+    echo =========================================
+    _qCol z
+    set -x
+}
+
+
+set -e
+
+BIG="big file(1).ext"
+BIGS="big_file1.ext"
+SMALL="small file"
+SMALLS="small_file"
+
+test -f "$BIG" || dd if=/dev/zero of="$BIG" bs=8192 count=40000
+test -f "$SMALL" ||dd if=/dev/urandom of="$SMALL" bs=4096 count=400
+
+set -x
+../code/flees-manager.py list
+cont
+
+../code/flees-manager.py show test -P
+cont
+
+../code/flees-manager.py modify -n test -P true -u true -d true -t testToken
+cont
+
+../code/flees-manager.py rest test flip -t testToken | tail -n 1 | xargs curl -o test-flip
+chmod +x test-flip
+./test-flip
+cont
+
+./test-flip update
+cont
+
+eval $( ./test-flip upload | tail -n 1 | sed -e 's,#,,' -e 's,file_to_upload.ext,,'; echo \"$BIG\" )
+sleep 2
+cont
+
+./test-flip url "$BIGS" | xargs curl -s -D /dev/stderr | head -c 1 | head -c 0
+cont
+
+./test-flip w "$SMALL"
+cont
+
+./test-flip
+cont
+
+./test-flip r "$SMALLS" | sha256sum
+cat "$SMALL" | sha256sum
+cont
+
+eval $( ../code/flees-manager.py rest test upload -t testToken | grep bash | sed -e 's,#,,' -e 's,file_to_upload.*,,' ; echo \"$BIG\" )
+cont
+
+echo | ./test-flip d "$SMALLS"
+echo | ./test-flip d "$BIGS"
+cont
+
+../code/flees-manager.py modify -n test --remove-token testToken
+cont
+
+
+rm -f test-flip "$SMALL" "$BIG"

28  test/test-config.json  Normal file
@@ -0,0 +1,28 @@
+{
+    "__comment": [
+        "workers: number of parallel processes. single long upload reserves a process.",
+        "timeout: seconds for process to last. single long upload cant take longer than this.",
+        "uid: Docker runs as root, this changes owner of written files. -1 to skip chowning",
+        "max_zip_size: dont allow zip downloads if folder size exceeds this many megabytes",
+        "app_secret_key: used to encrypt session cookie"
+    ],
+    "public_url": "http://localhost:8080",
+    "site_name": "testsite",
+    "notifier": "",
+    "workers": 3,
+    "timeout": 3600,
+    "uid": 1000,
+    "gid": -1,
+    "timezone": "Europe/Helsinki",
+    "__do_not_edit": "most likely you will not change anything after this line",
+    "data_folder": "data",
+    "version_folder": "_version",
+    "shares_file": "data/shares.json",
+    "log_file": "data/flees.log",
+    "zip_folder": "data/.zip",
+    "max_zip_size": 1000,
+    "date_format": "%Y-%m-%d %H:%M",
+    "app_secret_key": "Cz2dw5NiRt3PSMFBSLTAJJi7U2CdW7iPQqEeOaU6",
+    "debug": true
+}
+

2  test/test-env  Normal file
@@ -0,0 +1,2 @@
+FLEES_EXPOSE=0.0.0.0:8080
+FLEES_CONFIG=data/config.json
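
How the two test files plug in, sketched from read_config() in the utils package; the docker-compose wiring is not part of this diff, so the exact path mapping is an assumption: FLEES_CONFIG names the JSON file the app loads at startup, and test/test-config.json is presumably what ends up at that path inside the container.

    # Illustration only: the lookup read_config() performs, with the test values.
    import json, os

    os.environ.setdefault('FLEES_CONFIG', 'data/config.json')   # as set in test/test-env
    config_values = json.load(open(os.getenv('FLEES_CONFIG'), 'rt'))
    print(config_values['public_url'], config_values['max_zip_size'])
    # -> http://localhost:8080 1000   (with test/test-config.json contents at that path)
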