Merge branch 'upload'
This commit is contained in:
commit
263f71f7cd
6 changed files with 199 additions and 21 deletions
|
@ -1,7 +0,0 @@
|
|||
ansicolors==1.1.8
|
||||
certifi==2019.11.28
|
||||
chardet==3.0.4
|
||||
idna==2.8
|
||||
pkg-resources==0.0.0
|
||||
requests==2.22.0
|
||||
urllib3==1.25.7
|
|
@ -4,29 +4,29 @@ from pprint import pprint
|
|||
import requests
|
||||
import argparse
|
||||
|
||||
from rscli import rsfiles
|
||||
from rscli.httputils import req
|
||||
|
||||
# Optional color support: `colors` (the ansicolors package) is a soft
# dependency; the CLI must keep working without it.
try:
    from colors import color
    has_colors = True
except ImportError:
    has_colors = False

# Fall back to a pass-through color() when the library is missing or
# stdout is not a terminal (e.g. output piped to a file), so callers
# can use color() unconditionally.
if not has_colors or not sys.stdout.isatty():
    def color(text, *args, **kwargs):
        # Same call signature as colors.color(); returns text unchanged.
        return text
|
||||
|
||||
|
||||
|
||||
def err(msg):
    """Print an error message, highlighted in bold red when colors work."""
    styled = color(msg, fg="red", style="bold")
    print(styled)
|
||||
|
||||
|
||||
def is_group_subscribed(mSubscribeFlags):
    """Return True when the "subscribed" bit (0x4) is set in the flag mask."""
    SUBSCRIBED_BIT = 4
    return (mSubscribeFlags & SUBSCRIBED_BIT) != 0
|
||||
|
||||
|
||||
def req(args, location, data=None):
    """POST to the RetroShare JSON API and return the response.

    args: parsed CLI arguments carrying `endpoint` (base URL) and
          `auth` (a "user:password" pair for HTTP basic auth).
    location: API path, e.g. "/rsGxsForums/getForumsSummaries".
    data: optional JSON-serializable request body.

    Raises requests.HTTPError on non-2xx responses.
    """
    kwargs = {}
    if data is not None:
        kwargs["json"] = data
    # maxsplit=1 so a password containing ":" still yields the 2-tuple
    # that requests' basic-auth expects (maxsplit=2 produced a 3-tuple).
    r = requests.post(
        args.endpoint + location, auth=tuple(args.auth.split(":", 1)), **kwargs
    )
    r.raise_for_status()
    # TODO: handle r.status_code != 200
    return r
|
||||
|
||||
|
||||
def main_forum_list(args):
|
||||
r = req(args, "/rsGxsForums/getForumsSummaries")
|
||||
|
@ -47,6 +47,7 @@ def main_forum_list(args):
|
|||
def main_forum_read(args):
|
||||
r = req(args, "/rsGxsForums/getForumMsgMetaData", {"forumId": args.forum_id})
|
||||
items = r.json()["msgMetas"]
|
||||
items.sort(key=lambda p: p['mPublishTs'], reverse=True)
|
||||
if args.long:
|
||||
msgs = [item["mMsgId"] for item in items[: args.num_posts]]
|
||||
items_r = req(
|
||||
|
@ -55,13 +56,14 @@ def main_forum_read(args):
|
|||
{"forumId": args.forum_id, "msgsIds": msgs},
|
||||
)
|
||||
items = items_r.json()["msgs"]
|
||||
items.sort(key=lambda p: p['mMeta']['mPublishTs'], reverse=True)
|
||||
for item in items:
|
||||
print(color(item["mMeta"]["mMsgName"], style="bold", fg="green"))
|
||||
print()
|
||||
print(item["mMsg"]) # TODO: html2txt
|
||||
print()
|
||||
else:
|
||||
for item in posts[: args.num_posts]:
|
||||
for item in items[: args.num_posts]:
|
||||
print(color(item["mMsgName"], style="bold", fg="green"))
|
||||
print(" " + color(item["mMsgId"], style="underline"))
|
||||
|
||||
|
@ -85,6 +87,7 @@ def main_channel_list(args):
|
|||
def main_channel_read(args):
|
||||
r = req(args, "/rsGxsChannels/getContentSummaries", {"channelId": args.channel_id})
|
||||
posts = r.json()["summaries"]
|
||||
posts.sort(key=lambda p: p['mPublishTs'], reverse=True)
|
||||
if args.long:
|
||||
msgs = [post["mMsgId"] for post in posts[: args.num_posts]]
|
||||
posts_r = req(
|
||||
|
@ -93,6 +96,7 @@ def main_channel_read(args):
|
|||
{"channelId": args.channel_id, "contentsIds": msgs},
|
||||
)
|
||||
posts = posts_r.json()["posts"]
|
||||
posts.sort(key=lambda p: p['mMeta']['mPublishTs'], reverse=True)
|
||||
for post in posts:
|
||||
print(color(post["mMeta"]["mMsgName"], style="bold", fg="green"))
|
||||
print()
|
||||
|
@ -116,7 +120,6 @@ def main_channel_show(args):
|
|||
|
||||
|
||||
def main_channel_post_v1(args):
|
||||
chid = args.channel_id
|
||||
r = req(
|
||||
args,
|
||||
"/rsGxsChannels/createPost",
|
||||
|
@ -140,7 +143,6 @@ def main_channel_post_v1(args):
|
|||
|
||||
|
||||
def main_channel_post(args):
|
||||
chid = args.channel_id
|
||||
try:
|
||||
r = req(
|
||||
args,
|
||||
|
@ -165,6 +167,14 @@ def main_channel_post(args):
|
|||
return main_channel_post_v1(args)
|
||||
|
||||
|
||||
|
||||
def main_file_publish(args):
    """CLI entry point: publish files, printing each hash and its RS link."""
    hashes = rsfiles.file_publish(args, args.fnames)
    for digest, path in zip(hashes, args.fnames):
        line = color(digest, fg="green") + " \t%s" % path
        print(line)
        print(" " + rsfiles.get_file_link(args, digest, fname=path))
|
||||
|
||||
|
||||
def get_parser():
|
||||
p = argparse.ArgumentParser()
|
||||
p.add_argument("--endpoint", default="http://127.0.0.1:9092")
|
||||
|
@ -214,6 +224,12 @@ def get_parser():
|
|||
forum_read.add_argument("--num-posts", type=int, default=10)
|
||||
forum_read.set_defaults(mainfunc=main_forum_read)
|
||||
|
||||
files = p_sub.add_parser("file")
|
||||
files_sub = files.add_subparsers()
|
||||
files_list = files_sub.add_parser("publish")
|
||||
files_list.add_argument("fnames", nargs="+", metavar='fname' )
|
||||
files_list.set_defaults(mainfunc=main_file_publish)
|
||||
|
||||
# TODO: channel rss -> read and convert to rss
|
||||
|
||||
return p
|
14
rscli/httputils.py
Normal file
14
rscli/httputils.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
import requests
|
||||
|
||||
def req(args, location, data=None):
    """POST to the RetroShare JSON API and return the response.

    args: parsed CLI arguments carrying `endpoint` (base URL) and
          `auth` (a "user:password" pair for HTTP basic auth).
    location: API path, e.g. "/rsFiles/getSharedDirectories".
    data: optional JSON-serializable request body.

    Raises requests.HTTPError on non-2xx responses.
    """
    kwargs = {}
    if data is not None:
        kwargs["json"] = data
    # maxsplit=1 so a password containing ":" still yields the 2-tuple
    # that requests' basic-auth expects (maxsplit=2 produced a 3-tuple).
    r = requests.post(
        args.endpoint + location, auth=tuple(args.auth.split(":", 1)), **kwargs
    )
    r.raise_for_status()
    # TODO: handle r.status_code != 200
    return r
|
||||
|
||||
|
29
rscli/retroshare.py
Normal file
29
rscli/retroshare.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
import enum  # py3.4

# enum.IntFlag only exists on Python >= 3.6.  On older interpreters fall
# back to IntEnum (available since 3.4): its members are ints, so the
# bitwise flag tests used elsewhere still work.  The previous fallback,
# enum.Int, does not exist and raised AttributeError itself.
try:
    IntFlag = enum.IntFlag
except AttributeError:
    # py < 3.6
    IntFlag = enum.IntEnum
|
||||
|
||||
|
||||
class RS_FILE_HINTS(IntFlag):
    """Bitmask flags qualifying a file entry.

    Presumably mirrors the RS_FILE_HINTS_* constants of the RetroShare
    C++ API — TODO confirm against the upstream headers.
    """
    CACHE_deprecated = 0x00000001
    EXTRA = 0x00000002
    LOCAL = 0x00000004
    REMOTE = 0x00000008
    DOWNLOAD = 0x00000010
    UPLOAD = 0x00000020
    SPEC_ONLY = 0x01000000
    NETWORK_WIDE = 0x00000080

    BROWSABLE = 0x00000100
    SEARCHABLE = 0x00000200

    # Union of the permission bits: NETWORK_WIDE | BROWSABLE | SEARCHABLE.
    PERMISSION_MASK = 0x00000380
|
||||
|
||||
|
||||
class DIR_FLAGS(IntFlag):
    """Share-permission flags for a shared directory (used as `shareflags`
    when calling /rsFiles/addSharedDirectory)."""
    ANONYMOUS_DOWNLOAD = 0x0080
    BROWSABLE = 0x0400
    ANONYMOUS_SEARCH = 0x0800
|
101
rscli/rsfiles.py
Normal file
101
rscli/rsfiles.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
import hashlib
|
||||
import time
|
||||
import uuid
|
||||
import os.path
|
||||
try:
|
||||
from fsdb import Fsdb
|
||||
except ImportError:
|
||||
Fsdb = None
|
||||
from rscli.httputils import req
|
||||
from rscli import retroshare
|
||||
|
||||
|
||||
def get_fsdb(args):
    """Return a handle to the default local Fsdb file store.

    Raises when the optional Fsdb dependency is not installed.
    """
    if Fsdb is None:
        raise Exception("ERROR: library Fsdb is needed for file publishing")
    default_store = "~/.config/rscli/store/default"
    return Fsdb(os.path.expanduser(default_store), fmode="660")
|
||||
|
||||
|
||||
def filename_to_hash(args, dir_virtualname, filename):
    """Resolve *filename* inside a shared directory to its RetroShare hash.

    args: CLI args (endpoint/auth) forwarded to req().
    dir_virtualname: virtual name of the shared directory to search in.
    filename: path of the file, relative to that shared directory.

    Raises Exception when the directory or a path component cannot be
    found in RetroShare's shared-file index.
    """
    # Map the virtual name onto the on-disk directory name.
    r = req(args, "/rsFiles/getSharedDirectories")
    dir_filename = [
        d["filename"] for d in r.json()["dirs"] if d["virtualname"] == dir_virtualname
    ][0]
    # Handle 0 is the root of the shared-directory tree.
    r = req(args, "/rsFiles/requestDirDetails", {"handle": 0})
    children = [
        c for c in r.json()["details"]["children"] if c["name"] != "[Extra List]"
    ]
    # NOTE(review): this raises on the FIRST root that lacks the directory
    # instead of trying the remaining roots — confirm that is intended.
    for possible_root in children:
        r = req(
            args, "/rsFiles/requestDirDetails", {"handle": possible_root["handle"]}
        )
        found = [
            c for c in r.json()["details"]["children"] if c["name"] == dir_filename
        ]
        if not found:
            raise Exception("Error: could not find shared file in RS")
        handle = found[0]["handle"]

    # Ask RS to rescan, then give the daemon a moment to index/hash.
    r = req(args, '/rsFiles/ForceDirectoryCheck')
    time.sleep(5)
    # Walk the remaining path components down the directory tree.
    # (A stray no-op `hashlib` expression statement was removed here.)
    looking_for = filename.split(os.path.sep)
    for next_component in looking_for:
        r = req(args, '/rsFiles/requestDirDetails', {'handle': handle})
        found = [c for c in r.json()['details']['children']
                 if c['name'] == next_component]
        if not found:
            raise Exception('Error: could not find shared file in RS')
        handle = found[0]['handle']
    r = req(args, '/rsFiles/requestDirDetails', {'handle': handle})
    filehash = r.json()['details']['hash']
    return filehash
|
||||
|
||||
|
||||
def file_publish(args, fnames):
    """Publish *fnames* through the default Fsdb store; yield sha1 digests.

    Copies each file into the local Fsdb store, ensures the store is
    shared (anonymous download) under a persistent virtual name, and
    yields the sha1 hex digest of each file's content, in input order.

    Note: this is a generator — nothing happens until it is iterated.
    """
    fsdb = get_fsdb(args)
    virtualname_path = os.path.join(fsdb.fsdbRoot, "virtualname.txt")
    # The virtual name is generated once and persisted so re-publishing
    # reuses the same shared directory.  `with` ensures handles close.
    if os.path.exists(virtualname_path):
        with open(virtualname_path) as fp:
            virtualname = fp.read().strip()
    else:
        virtualname = "rscli-%s" % uuid.uuid4()
        with open(virtualname_path, "w") as fp:
            fp.write(virtualname)

    # Share the store directory if RS does not know it yet.
    r = req(args, "/rsFiles/getSharedDirectories")
    if virtualname not in [shared["virtualname"] for shared in r.json()["dirs"]]:
        r = req(
            args,
            "/rsFiles/addSharedDirectory",
            {
                "dir": {
                    "filename": fsdb.fsdbRoot,
                    "virtualname": virtualname,
                    "shareflags": retroshare.DIR_FLAGS.ANONYMOUS_DOWNLOAD,
                }
            },
        )
        if not r.json()["retval"]:
            raise Exception("Error: could not create shared dir for default store")
        time.sleep(1)

    for fname in fnames:
        fsdb.add(fname)
        # sha1 of the file content is used as the file identifier here.
        h = hashlib.new("sha1")
        with open(fname, "rb") as fp:
            h.update(fp.read())
        yield h.hexdigest()

    # Trigger a rescan once every file has been added to the store.
    r = req(args, "/rsFiles/ForceDirectoryCheck")
|
||||
|
||||
|
||||
def get_file_link(args, hash_digest, fname=None, size=None):
    """Build a retroshare://file link for a published file.

    fname and size are recovered from the local Fsdb store when not
    given.  The store (and thus the optional Fsdb library) is only
    touched when something is actually missing, so callers that pass
    both values work without Fsdb installed.
    """
    if fname is None or size is None:
        fsdb = get_fsdb(args)
        if fname is None:
            # TODO: check file name on filesystem
            fname = os.path.basename(fsdb.get_file_path(hash_digest))
        if size is None:
            size = os.stat(fsdb.get_file_path(hash_digest)).st_size
    return "retroshare://file?name=%s&size=%d&hash=%s" % (fname, size, hash_digest)
|
||||
|
25
setup.py
Normal file
25
setup.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
"""Packaging metadata for rscli (console script: rscli)."""
from setuptools import setup

setup(
    name='rscli',
    version='0.1',
    author='boyska',
    author_email='',
    license='AGPL3',
    packages=['rscli'],
    install_requires=[
        'ansicolors==1.1.8',
        'certifi==2019.11.28',
        'chardet==3.0.4',
        'idna==2.8',
        # 'pkg-resources==0.0.0' removed: it is a Debian/Ubuntu `pip
        # freeze` artifact, not a real PyPI package, and makes
        # `pip install` fail.
        'requests==2.22.0',
        'urllib3==1.25.7',
        'Fsdb==1.2.2',
    ],
    entry_points={
        'console_scripts': ['rscli=rscli.cli:main'],
    },
    zip_safe=False,
)
|
Loading…
Reference in a new issue