|
@@ -1,4 +1,5 @@
|
|
|
#!/usr/bin/env python3
|
|
|
+import hashlib
|
|
|
import sys
|
|
|
import time
|
|
|
import uuid
|
|
@@ -7,6 +8,8 @@ import requests
|
|
|
import argparse
|
|
|
import os.path
|
|
|
|
|
|
+import retroshare
|
|
|
+
|
|
|
try:
|
|
|
from colors import color
|
|
|
except ImportError:
|
|
@@ -176,13 +179,15 @@ def main_channel_post(args):
|
|
|
return main_channel_post_v1(args)
|
|
|
|
|
|
|
|
|
-def _file_publish(args, fnames):
|
|
|
+def get_fsdb(args):
|
|
|
if Fsdb is None:
|
|
|
raise Exception('ERROR: library Fsdb is needed for file publishing')
|
|
|
store_dir = os.path.expanduser('~/.config/rscli/store/default')
|
|
|
- fsdb = Fsdb(store_dir, fmode='660')
|
|
|
+ return Fsdb(store_dir, fmode='660')
|
|
|
|
|
|
- virtualname_path = os.path.join(store_dir, 'virtualname.txt')
|
|
|
+def _file_publish(args, fnames):
|
|
|
+ fsdb = get_fsdb(args)
|
|
|
+ virtualname_path = os.path.join(fsdb.fsdbRoot, 'virtualname.txt')
|
|
|
if os.path.exists(virtualname_path):
|
|
|
virtualname = open(virtualname_path).read().strip()
|
|
|
else:
|
|
@@ -190,7 +195,7 @@ def _file_publish(args, fnames):
|
|
|
open(virtualname_path, 'w').write(virtualname)
|
|
|
r = req(args, '/rsFiles/getSharedDirectories')
|
|
|
if virtualname not in [shared['virtualname'] for shared in r.json()['dirs']]:
|
|
|
- r = req(args, '/rsFiles/addSharedDirectory', {'dir':{ 'filename': store_dir,
|
|
|
+ r = req(args, '/rsFiles/addSharedDirectory', {'dir':{ 'filename': fsdb.fsdbRoot,
|
|
|
'virtualname': virtualname
|
|
|
}})
|
|
|
if not r.json()['retval']:
|
|
@@ -213,27 +218,50 @@ def _file_publish(args, fnames):
|
|
|
|
|
|
for fname in fnames:
|
|
|
digest = fsdb.add(fname)
|
|
|
- r = req(args, '/rsFiles/ForceDirectoryCheck')
|
|
|
- time.sleep(5)
|
|
|
- # mo lo ricerchiamo va
|
|
|
- looking_for = os.path.relpath(fsdb.get_file_path(digest),
|
|
|
- store_dir).split(os.path.sep)
|
|
|
- for next_component in looking_for:
|
|
|
- r = req(args, '/rsFiles/requestDirDetails', { 'handle': handle })
|
|
|
- found = [c for c in r.json()['details']['children']
|
|
|
- if c['name'] == next_component]
|
|
|
- if not found:
|
|
|
- raise Exception('Error: could not find shared file in RS')
|
|
|
- handle = found[0]['handle']
|
|
|
- r = req(args, '/rsFiles/requestDirDetails', { 'handle': handle })
|
|
|
- filehash = r.json()['details']['hash']
|
|
|
- yield filehash
|
|
|
+ # correct implementation: check hash inside storage
|
|
|
+ # unfortunately it is very slow because we can't give
|
|
|
+    # a hint to ForceDirectoryCheck for where to look
|
|
|
+ # for new files
|
|
|
+    # below this comment there is a valid implementation, which has the defect
|
|
|
+ # of being very coupled to retroshare implementation
|
|
|
+ # r = req(args, '/rsFiles/ForceDirectoryCheck')
|
|
|
+ #time.sleep(5)
|
|
|
+    ## now let's just search for it
|
|
|
+ #looking_for = os.path.relpath(fsdb.get_file_path(digest),
|
|
|
+ # fsdb.fsdbRoot).split(os.path.sep)
|
|
|
+ #hashlib
|
|
|
+ #for next_component in looking_for:
|
|
|
+ # r = req(args, '/rsFiles/requestDirDetails', { 'handle': handle })
|
|
|
+ # found = [c for c in r.json()['details']['children']
|
|
|
+ # if c['name'] == next_component]
|
|
|
+ # if not found:
|
|
|
+ # raise Exception('Error: could not find shared file in RS')
|
|
|
+ # handle = found[0]['handle']
|
|
|
+ #r = req(args, '/rsFiles/requestDirDetails', { 'handle': handle })
|
|
|
+ #filehash = r.json()['details']['hash']
|
|
|
+ #yield filehash
|
|
|
+
|
|
|
+ h = hashlib.new('sha1')
|
|
|
+ h.update(open(fname, 'rb').read())
|
|
|
+ yield h.hexdigest()
|
|
|
+
|
|
|
+ r = req(args, '/rsFiles/ForceDirectoryCheck')
|
|
|
+
|
|
|
+def get_file_link(args, hash_digest, fname=None, size=None):
|
|
|
+ fsdb = get_fsdb(args)
|
|
|
+ if fname is None:
|
|
|
+ # TODO: check file name on filesystem
|
|
|
+ fname = os.path.basename(fsdb.get_file_path(hash_digest))
|
|
|
+ if size is None:
|
|
|
+ size = os.stat(fsdb.get_file_path(hash_digest)).st_size
|
|
|
+ return 'retroshare://file?name=%s&size=%d&hash=%s' % (fname, size, hash_digest)
|
|
|
|
|
|
|
|
|
def main_file_publish(args):
|
|
|
ret = _file_publish(args, args.fnames)
|
|
|
for filehash, fname in zip(ret, args.fnames):
|
|
|
print(color(filehash, fg='green') + ' \t%s' % fname)
|
|
|
+ print(' ' + get_file_link(args, filehash, fname=fname))
|
|
|
|
|
|
def get_parser():
|
|
|
p = argparse.ArgumentParser()
|