channel post supports attachments

yay!
boyska 2020-01-12 03:04:36 +01:00
parent 263f71f7cd
commit b3d58a7552
3 changed files with 35 additions and 21 deletions
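
The gist: `channel post` now takes an `--attach-name` option; every path listed there is first published via `rsfiles.file_publish`, and the resulting hashes are attached to the post as a `files` array. A sketch of the payload the command now builds (the key names come from the diff below; the values are invented for illustration, and the endpoint the payload goes to is elided in the hunks):

payload = {
    "channelId": "<target channel id>",
    "title": "my post",                # from --post-title
    "mBody": "hello from rscli",       # from --post-body
    "files": [                         # one entry per --attach-name path
        {
            "mHash": "<hash returned by rsfiles.file_publish>",
            "mName": "photo.jpg",      # os.path.basename of the local path
            "mSize": 123456,           # os.stat(path).st_size
        }
    ],
}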

View file

@@ -1,4 +1,6 @@
 #!/usr/bin/env python3
+import os
+import os.path
 import sys
 from pprint import pprint
 import requests
@@ -9,16 +11,17 @@ from rscli.httputils import req
 try:
     from colors import color

     has_colors = True
 except ImportError:
     has_colors = False

 if not has_colors or not sys.stdout.isatty():

     def color(text, *args, **kwargs):
         return text


 def err(msg):
     print(color(msg, fg="red", style="bold"))
@@ -27,7 +30,6 @@ def is_group_subscribed(mSubscribeFlags):
     return bool(mSubscribeFlags & 4)


 def main_forum_list(args):
     r = req(args, "/rsGxsForums/getForumsSummaries")
     forums = r.json()["forums"]
@@ -47,7 +49,7 @@ def main_forum_list(args):
 def main_forum_read(args):
     r = req(args, "/rsGxsForums/getForumMsgMetaData", {"forumId": args.forum_id})
     items = r.json()["msgMetas"]
-    items.sort(key=lambda p: p['mPublishTs'], reverse=True)
+    items.sort(key=lambda p: p["mPublishTs"], reverse=True)
     if args.long:
         msgs = [item["mMsgId"] for item in items[: args.num_posts]]
         items_r = req(
@@ -56,7 +58,7 @@ def main_forum_read(args):
             {"forumId": args.forum_id, "msgsIds": msgs},
         )
         items = items_r.json()["msgs"]
-        items.sort(key=lambda p: p['mMeta']['mPublishTs'], reverse=True)
+        items.sort(key=lambda p: p["mMeta"]["mPublishTs"], reverse=True)
     for item in items:
         print(color(item["mMeta"]["mMsgName"], style="bold", fg="green"))
         print()
@@ -87,7 +89,7 @@ def main_channel_list(args):
 def main_channel_read(args):
     r = req(args, "/rsGxsChannels/getContentSummaries", {"channelId": args.channel_id})
     posts = r.json()["summaries"]
-    posts.sort(key=lambda p: p['mPublishTs'], reverse=True)
+    posts.sort(key=lambda p: p["mPublishTs"], reverse=True)
     if args.long:
         msgs = [post["mMsgId"] for post in posts[: args.num_posts]]
         posts_r = req(
@@ -96,7 +98,7 @@ def main_channel_read(args):
             {"channelId": args.channel_id, "contentsIds": msgs},
         )
         posts = posts_r.json()["posts"]
-        posts.sort(key=lambda p: p['mMeta']['mPublishTs'], reverse=True)
+        posts.sort(key=lambda p: p["mMeta"]["mPublishTs"], reverse=True)
     for post in posts:
         print(color(post["mMeta"]["mMsgName"], style="bold", fg="green"))
         print()
@@ -143,6 +145,16 @@ def main_channel_post_v1(args):
 def main_channel_post(args):
+    attachments = [
+        {
+            "mHash": digest,
+            "mName": os.path.basename(fpath),
+            "mSize": os.stat(fpath).st_size,
+        }
+        for digest, fpath in zip(
+            rsfiles.file_publish(args, args.attach_fpaths), args.attach_fpaths
+        )
+    ]
     try:
         r = req(
             args,
@@ -151,6 +163,7 @@ def main_channel_post(args):
                 "channelId": args.channel_id,
                 "title": args.post_title,
                 "mBody": args.post_body,
+                "files": attachments,
             },
         )
     except requests.exceptions.HTTPError as exc:
@@ -163,11 +176,11 @@ def main_channel_post(args):
         print(ret["errorMessage"])
         pprint(ret)
         sys.exit(1)
-    pprint(ret)
+    print(color(ret["postId"], fg="green", style="bold"))
+    return

     return main_channel_post_v1(args)


 def main_file_publish(args):
     ret = rsfiles.file_publish(args, args.fnames)
     for filehash, fname in zip(ret, args.fnames):
@@ -206,6 +219,7 @@ def get_parser():
     ch_post.set_defaults(mainfunc=main_channel_post)
     ch_post.add_argument("--post-title")
     ch_post.add_argument("--post-body")
+    ch_post.add_argument("--attach-name", nargs="*", dest="attach_fpaths")

     forum = p_sub.add_parser("forum")
     forum.add_argument("--forum-id")
@@ -227,7 +241,7 @@ def get_parser():
     files = p_sub.add_parser("file")
     files_sub = files.add_subparsers()
     files_list = files_sub.add_parser("publish")
-    files_list.add_argument("fnames", nargs="+", metavar='fname' )
+    files_list.add_argument("fnames", nargs="+", metavar="fname")
     files_list.set_defaults(mainfunc=main_file_publish)

     # TODO: channel rss -> read and convert to rss
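
A note on the new flag: thanks to dest, argparse stores its values under args.attach_fpaths. A standalone sketch of that behaviour (the prog name and the bare parser are hypothetical simplifications; the add_argument line is verbatim from the diff):

import argparse

p = argparse.ArgumentParser(prog="rscli")  # prog name is a guess, not in the diff
p.add_argument("--attach-name", nargs="*", dest="attach_fpaths")
args = p.parse_args(["--attach-name", "photo.jpg", "notes.pdf"])
assert args.attach_fpaths == ["photo.jpg", "notes.pdf"]

Worth noting: when the flag is omitted entirely, attach_fpaths defaults to None rather than [], so a post without attachments relies on rsfiles.file_publish tolerating None.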

View file

@@ -1,5 +1,6 @@
 import requests

+
 def req(args, location, data=None):
     kwargs = {}
     if data is not None:
@@ -10,5 +11,3 @@ def req(args, location, data=None):
     r.raise_for_status()
     # TODO: handle r.status_code != 200
     return r
-
-
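
Only the first and last lines of req() appear in this commit, but the call sites in the first file show its contract: it takes the parsed CLI args (carrying the connection settings), an API location string, and an optional payload sent as the request body, and it returns the requests.Response after raise_for_status(). A minimal usage sketch, with both call shapes taken from the diff (the wrapper functions here are hypothetical):

from rscli.httputils import req

def list_channel_posts(args, channel_id):
    # args is the argparse namespace req() reads its connection settings from
    r = req(args, "/rsGxsChannels/getContentSummaries", {"channelId": channel_id})
    return r.json()["summaries"]

def list_forums(args):
    # no payload: the signature above defaults to data=None
    r = req(args, "/rsGxsForums/getForumsSummaries")
    return r.json()["forums"]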

View file

@@ -2,6 +2,7 @@ import hashlib
 import time
 import uuid
 import os.path
+
 try:
     from fsdb import Fsdb
 except ImportError:
@@ -37,19 +38,20 @@ def filename_to_hash(args, dir_virtualname, filename):
         raise Exception("Error: could not find shared file in RS")
     handle = found[0]["handle"]

-    r = req(args, '/rsFiles/ForceDirectoryCheck')
+    r = req(args, "/rsFiles/ForceDirectoryCheck")
     time.sleep(5)
     looking_for = filename.split(os.path.sep)
     hashlib
     for next_component in looking_for:
-        r = req(args, '/rsFiles/requestDirDetails', { 'handle': handle })
-        found = [c for c in r.json()['details']['children']
-                 if c['name'] == next_component]
+        r = req(args, "/rsFiles/requestDirDetails", {"handle": handle})
+        found = [
+            c for c in r.json()["details"]["children"] if c["name"] == next_component
+        ]
         if not found:
-            raise Exception('Error: could not find shared file in RS')
-        handle = found[0]['handle']
-    r = req(args, '/rsFiles/requestDirDetails', { 'handle': handle })
-    filehash = r.json()['details']['hash']
+            raise Exception("Error: could not find shared file in RS")
+        handle = found[0]["handle"]
+    r = req(args, "/rsFiles/requestDirDetails", {"handle": handle})
+    filehash = r.json()["details"]["hash"]
     return filehash
@@ -98,4 +100,3 @@ def get_file_link(args, hash_digest, fname=None, size=None):
     if size is None:
         size = os.stat(fsdb.get_file_path(hash_digest)).st_size
     return "retroshare://file?name=%s&size=%d&hash=%s" % (fname, size, hash_digest)
-