Fix zip and #86
RaSan147 committed Mar 3, 2024
1 parent 67cad18 commit 23337dd
Showing 14 changed files with 488 additions and 267 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -4,7 +4,7 @@
*.pdb
*.bak
**/*.bkp.*

pipfile

# Byte-compiled / optimized / DLL files
__pycache__/
27 changes: 15 additions & 12 deletions dev_src/_fs_utils.py
@@ -199,7 +199,7 @@ def _get_tree_size(path, limit=None, must_read=False):

    return total

-def _get_tree_path_n_size(path, limit=-1, must_read=False, path_type="full"):
+def _get_tree_path_n_size(path, limit=-1, must_read=False, path_type="full", add_dirs=False):
"""
returns a list of files[size, path] in a directory and its subdirectories.
[ [`path`, size], ... ]
@@ -212,18 +212,20 @@ def _get_tree_path_n_size(path, limit=-1, must_read=False, path_type="full"):
    total = 0
    start_path = path

-   for entry in walk_dir(path):
-       try:
-           size = entry.stat(follow_symlinks=False).st_size
-       except OSError:
-           continue
-       total += size
+   for entry in walk_dir(path, yield_dir=add_dirs):
+       size = 0
+       if not entry.is_dir():
+           try:
+               size = entry.stat(follow_symlinks=False).st_size
+           except OSError:
+               continue
+           total += size

        if limit>0 and total>limit:
            raise LimitExceed

        if must_read and not check_access(entry.path):
            continue


        if path_type == "full":
@@ -376,7 +378,8 @@ def dir_navigator(path):

    for i in range(1, len(dirs)-1):
        dir = dirs[i]
-       urls.append(urls[i-1] + urllib.parse.quote(dir, errors='surrogatepass' )+ '/' if not dir.endswith('/') else "")
+       # urls.append(urls[i-1] + urllib.parse.quote(dir, errors='surrogatepass' )+ '/' if not dir.endswith('/') else "")
+       urls.append(urls[i-1] + dir+ '/' if not dir.endswith('/') else "")
        names.append(dir)

    for i in range(len(names)):
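For context on the new `add_dirs` flag (and the `yield_dir` flag it passes to `walk_dir`), here is a minimal illustrative sketch of the same pattern: a recursive `os.scandir()` walk that can optionally yield directory entries alongside files, so empty folders are not silently dropped. The names `walk_entries`, `tree_paths_and_sizes` and `SizeLimitExceeded` are made up for this sketch and are not the project's own API.

import os

class SizeLimitExceeded(Exception):
    """Raised once the accumulated size crosses the caller's limit."""

def walk_entries(path, yield_dirs=False):
    # Recursively yield os.DirEntry objects; directories are yielded
    # only when yield_dirs is True (mirroring the add_dirs behaviour above).
    for entry in os.scandir(path):
        if entry.is_dir(follow_symlinks=False):
            if yield_dirs:
                yield entry
            yield from walk_entries(entry.path, yield_dirs)
        else:
            yield entry

def tree_paths_and_sizes(path, limit=-1, yield_dirs=False):
    # Collect [path, size] pairs; directories contribute size 0, and a
    # positive limit aborts the walk once the running total exceeds it.
    out, total = [], 0
    for entry in walk_entries(path, yield_dirs):
        size = 0
        if not entry.is_dir(follow_symlinks=False):
            try:
                size = entry.stat(follow_symlinks=False).st_size
            except OSError:
                continue
            total += size
        if limit > 0 and total > limit:
            raise SizeLimitExceeded
        out.append([entry.path, size])
    return out, total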
3 changes: 3 additions & 0 deletions dev_src/_page_templates.py
@@ -79,6 +79,9 @@ def admin_page_script():
def error_page_script():
    return get_template("script_error_page.js")

+def zip_page_script():
+   return get_template("script_zip_page.js")
+



54 changes: 44 additions & 10 deletions dev_src/_zipfly_manager.py
@@ -97,6 +97,21 @@ def generator(self):
                # arcname will be default path
                path[self.arcname] = path[self.filesystem]

+
+           if os.path.isdir(path[self.filesystem]):
+               print(path[self.filesystem])
+               if os.listdir(path[self.filesystem]):
+                   continue # not empty
+               print("empty")
+               # Write empty directory:
+               z_info = zipfile.ZipInfo(path[self.arcname] + '/')
+               z_info.compress_type = zipfile.ZIP_STORED
+
+               zf.writestr(z_info, b'')
+
+               yield stream.get(), self.ezs
+               continue
            z_info = zipfile.ZipInfo.from_file(
                path[self.filesystem],
                path[self.arcname],
@@ -137,15 +152,15 @@ def __call__(self, *key):


class FixSizeOrderedDict(OrderedDict, Callable_dict):
    def __init__(self, *args, max=0, **kwargs):
        self._max = max
        super().__init__(*args, **kwargs)

    def __setitem__(self, key, value):
        OrderedDict.__setitem__(self, key, value)
        if self._max > 0:
            if len(self) > self._max:
                self.popitem(False)
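`FixSizeOrderedDict` above is a small fixed-capacity cache: insertion order is kept and, once the dict grows past `max`, the oldest entry is evicted. A self-contained sketch of the same idea (the `Callable_dict` mixin is omitted here), with hypothetical keys for illustration:

from collections import OrderedDict

class BoundedOrderedDict(OrderedDict):
    # Same idea as FixSizeOrderedDict: keep insertion order, cap the entry count.
    def __init__(self, *args, max=0, **kwargs):
        self._max = max
        super().__init__(*args, **kwargs)

    def __setitem__(self, key, value):
        OrderedDict.__setitem__(self, key, value)
        if self._max > 0 and len(self) > self._max:
            self.popitem(last=False)  # evict the oldest entry (FIFO)

cache = BoundedOrderedDict(max=2)
cache["a"] = 1
cache["b"] = 2
cache["c"] = 3        # "a" is evicted here
print(list(cache))    # prints ['b', 'c']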

class ZIP_Manager:
    def __init__(self, zip_allowed, size_limit=-1) -> None:
@@ -193,7 +208,7 @@ def get_id(self, path, size=None):
        source_m_time = get_dir_m_time(path)
        if size is None:
            try:
-               fs = _get_tree_path_n_size(path, must_read=True, limit=self.size_limit, path_type="both")
+               fs = _get_tree_path_n_size(path, must_read=True, limit=self.size_limit, path_type="both", add_dirs=True)
            except LimitExceed as e:
                self.calculating.pop(path) # make sure to remove calculating flag (MAJOR BUG)
                raise e
@@ -255,7 +270,7 @@ def err(msg):

        if not self.calculation_cache(zid):
            try:
-               fs = _get_tree_path_n_size(path, must_read=True, path_type="both", limit=self.size_limit)
+               fs = _get_tree_path_n_size(path, must_read=True, path_type="both", limit=self.size_limit, add_dirs=True)
            except LimitExceed as e:
                return err("DIRECTORY SIZE LIMIT EXCEED")
            source_size = sum(i[1] for i in fs)
@@ -322,3 +337,22 @@ def err(msg):

    def archive_thread(self, path, zid, size=None):
        return threading.Thread(target=self.archive, args=(path, zid, size))
+
+
+if __name__ == "__main__":
+   paths = [
+       {
+           'fs': 'test(hahah)'
+       },
+   ]
+
+   zfly = ZipFly(paths = paths)
+
+   generator = zfly.generator()
+   print (generator)
+   # <generator object ZipFly.generator at 0x7f74d52bcc50>
+
+
+   with open("large.zip", "wb") as f:
+       for i in generator:
+           f.write(i[0])
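The empty-directory handling added to `generator()` above leans on a standard `zipfile` behaviour: a `ZipInfo` whose name ends in `/`, written with no payload, becomes a directory entry in the archive. A stand-alone sketch of that technique (the file names are made up for illustration):

import zipfile

# Build an archive containing one empty directory and one ordinary file.
with zipfile.ZipFile("example.zip", "w") as zf:
    # A ZipInfo whose filename ends with "/" is treated as a directory;
    # storing it with empty bytes records the (empty) folder in the archive.
    dir_info = zipfile.ZipInfo("empty_folder/")
    dir_info.compress_type = zipfile.ZIP_STORED
    zf.writestr(dir_info, b"")

    zf.writestr("notes/readme.txt", "hello")  # regular file entry for contrast

with zipfile.ZipFile("example.zip") as zf:
    print(zf.namelist())  # prints ['empty_folder/', 'notes/readme.txt']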
24 changes: 19 additions & 5 deletions dev_src/clone.py
@@ -34,6 +34,7 @@ def get_list_dir(path):

def check_exist(url, path, check_method):
    # print(check_method)
+   print("CHECK: ", url)
    try:
        header = session.head(url).headers

@@ -56,7 +57,7 @@
    if not os.path.isfile(path):
        return False

-   if check_method =="date":
+   if check_method.startswith("date"):
        tt = os.path.getmtime(path)

        # tt = fs.st_mtime
@@ -67,7 +68,10 @@
    # remove microseconds, like in If-Modified-Since
    local_last_modify = local_last_modify.replace(microsecond=0)

-   if local_last_modify == original_modify:
+   if local_last_modify == original_modify and check_method == "date":
        return True
+
+   if local_last_modify >= original_modify and check_method == "date+":
+       return True

    # print("LOCAL: ", path, "==", local_last_modify)
@@ -133,7 +137,7 @@ def dl(url, path, overwrite, check_method):

from concurrent.futures import ThreadPoolExecutor, as_completed

-executor = ThreadPoolExecutor(8)
+executor = ThreadPoolExecutor(6)

futures = []

@@ -144,6 +148,9 @@ def clone(url, path = "./", overwrite = False, check_exist = "date", delete_extr
    overwrite: overwrite existing files reguardless of checking existance (False)
    check_exist: check if file exists by "date" or "size" or None to ignore totally (date)
    """
+   if url[-1] != "/":
+       url += "/"
+
    Q = Queue()
    def get_json(url):

@@ -162,12 +169,13 @@ def run_Q(url, path = "./", overwrite = False, check_exist = "date", delete_extr

    if path[-1] != "/":
        path += "/"

+   os.makedirs(path, exist_ok=True) # make sure the directory exists even if it's empty
+
    json = get_json(url)
    if not json:
        return

-   os.makedirs(path, exist_ok=True)


    remote_list = []
@@ -208,7 +216,13 @@


if __name__ == "__main__":
-   clone("SOURCE_DIR", "DESTINATION_DIR", False, "date", True)
+   clone(
+       url="http://192.168.0.108:6969/7%2C%2CVP%20%20424-425/",
+       path="./",
+       overwrite=False,
+       check_exist="date+",
+       delete_extras=False
+   )

    for future in as_completed(futures):
        bool(future.result())
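The `check_method` change above splits the freshness test in two: `"date"` requires the local file's mtime to equal the server's `Last-Modified` timestamp, while the new `"date+"` also accepts a local copy that is newer than the remote one. A hedged, stand-alone sketch of that comparison, assuming a plain `requests` session and a server that sends `Last-Modified` (the helper name `is_up_to_date` is illustrative, not from the repository):

import os
from datetime import datetime, timezone
from email.utils import parsedate_to_datetime

import requests

def is_up_to_date(url, path, check_method="date"):
    # Compare the server's Last-Modified header with the local file's mtime.
    #   "date"  -> timestamps must match exactly (after dropping microseconds)
    #   "date+" -> a local file at least as new as the remote also passes
    if not os.path.isfile(path):
        return False

    last_modified = requests.head(url).headers.get("Last-Modified")
    if last_modified is None:
        return False
    remote = parsedate_to_datetime(last_modified)  # timezone-aware (GMT)

    local = datetime.fromtimestamp(os.path.getmtime(path), tz=timezone.utc)
    local = local.replace(microsecond=0)  # Last-Modified has second precision

    if check_method == "date":
        return local == remote
    if check_method == "date+":
        return local >= remote
    return False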
8 changes: 8 additions & 0 deletions dev_src/html_page.html
@@ -157,6 +157,12 @@ <h2 id="player-warning"></h2>

    </div>

+   <div id="zip-page" class="page">
+       <h2>ZIPPING FOLDER</h2>
+       <h3 id="zip-prog">Progress</h3>
+       <h3 id="zip-perc"></h3>
+   </div>
+
    <div id="admin_page" class="page">

        <h1 style="text-align: center;">Admin Page</h1>
@@ -209,6 +215,8 @@ <h2>This page requires JS enabled</h2>
    <script src="/?video_page_script"></script>
    <script src="/?admin_page_script"></script>
    <script src="/?error_page_script"></script>
+   <script src="/?zip_page_script"></script>
+

    <script src="/?page_handler_script"></script>

62 changes: 0 additions & 62 deletions dev_src/html_zip_page.html

This file was deleted.

