channel post supports attachments

yay!
This commit is contained in:
boyska 2020-01-12 03:04:36 +01:00
parent 263f71f7cd
commit b3d58a7552
3 changed files with 35 additions and 21 deletions

View file

@@ -1,4 +1,6 @@
#!/usr/bin/env python3
import os
import os.path
import sys
from pprint import pprint
import requests
@@ -9,16 +11,17 @@ from rscli.httputils import req
try:
from colors import color
has_colors = True
except ImportError:
has_colors = False
if not has_colors or not sys.stdout.isatty():
def color(text, *args, **kwargs):
return text
def err(msg):
print(color(msg, fg="red", style="bold"))
@@ -27,7 +30,6 @@ def is_group_subscribed(mSubscribeFlags):
return bool(mSubscribeFlags & 4)
def main_forum_list(args):
r = req(args, "/rsGxsForums/getForumsSummaries")
forums = r.json()["forums"]
@@ -47,7 +49,7 @@ def main_forum_list(args):
def main_forum_read(args):
r = req(args, "/rsGxsForums/getForumMsgMetaData", {"forumId": args.forum_id})
items = r.json()["msgMetas"]
items.sort(key=lambda p: p['mPublishTs'], reverse=True)
items.sort(key=lambda p: p["mPublishTs"], reverse=True)
if args.long:
msgs = [item["mMsgId"] for item in items[: args.num_posts]]
items_r = req(
@@ -56,7 +58,7 @@ def main_forum_read(args):
{"forumId": args.forum_id, "msgsIds": msgs},
)
items = items_r.json()["msgs"]
items.sort(key=lambda p: p['mMeta']['mPublishTs'], reverse=True)
items.sort(key=lambda p: p["mMeta"]["mPublishTs"], reverse=True)
for item in items:
print(color(item["mMeta"]["mMsgName"], style="bold", fg="green"))
print()
@@ -87,7 +89,7 @@ def main_channel_list(args):
def main_channel_read(args):
r = req(args, "/rsGxsChannels/getContentSummaries", {"channelId": args.channel_id})
posts = r.json()["summaries"]
posts.sort(key=lambda p: p['mPublishTs'], reverse=True)
posts.sort(key=lambda p: p["mPublishTs"], reverse=True)
if args.long:
msgs = [post["mMsgId"] for post in posts[: args.num_posts]]
posts_r = req(
@@ -96,7 +98,7 @@ def main_channel_read(args):
{"channelId": args.channel_id, "contentsIds": msgs},
)
posts = posts_r.json()["posts"]
posts.sort(key=lambda p: p['mMeta']['mPublishTs'], reverse=True)
posts.sort(key=lambda p: p["mMeta"]["mPublishTs"], reverse=True)
for post in posts:
print(color(post["mMeta"]["mMsgName"], style="bold", fg="green"))
print()
@@ -143,6 +145,16 @@ def main_channel_post_v1(args):
def main_channel_post(args):
attachments = [
{
"mHash": digest,
"mName": os.path.basename(fpath),
"mSize": os.stat(fpath).st_size,
}
for digest, fpath in zip(
rsfiles.file_publish(args, args.attach_fpaths), args.attach_fpaths
)
]
try:
r = req(
args,
@@ -151,6 +163,7 @@ def main_channel_post(args):
"channelId": args.channel_id,
"title": args.post_title,
"mBody": args.post_body,
"files": attachments,
},
)
except requests.exceptions.HTTPError as exc:
@@ -163,11 +176,11 @@ def main_channel_post(args):
print(ret["errorMessage"])
pprint(ret)
sys.exit(1)
pprint(ret)
print(color(ret["postId"], fg="green", style="bold"))
return
return main_channel_post_v1(args)
def main_file_publish(args):
ret = rsfiles.file_publish(args, args.fnames)
for filehash, fname in zip(ret, args.fnames):
@@ -206,6 +219,7 @@ def get_parser():
ch_post.set_defaults(mainfunc=main_channel_post)
ch_post.add_argument("--post-title")
ch_post.add_argument("--post-body")
ch_post.add_argument("--attach-name", nargs="*", dest="attach_fpaths")
forum = p_sub.add_parser("forum")
forum.add_argument("--forum-id")
@@ -227,7 +241,7 @@ def get_parser():
files = p_sub.add_parser("file")
files_sub = files.add_subparsers()
files_list = files_sub.add_parser("publish")
files_list.add_argument("fnames", nargs="+", metavar='fname' )
files_list.add_argument("fnames", nargs="+", metavar="fname")
files_list.set_defaults(mainfunc=main_file_publish)
# TODO: channel rss -> read and convert to rss

View file

@@ -1,5 +1,6 @@
import requests
def req(args, location, data=None):
kwargs = {}
if data is not None:
@@ -10,5 +11,3 @@ def req(args, location, data=None):
r.raise_for_status()
# TODO: handle r.status_code != 200
return r

View file

@@ -2,6 +2,7 @@ import hashlib
import time
import uuid
import os.path
try:
from fsdb import Fsdb
except ImportError:
@@ -37,19 +38,20 @@ def filename_to_hash(args, dir_virtualname, filename):
raise Exception("Error: could not find shared file in RS")
handle = found[0]["handle"]
r = req(args, '/rsFiles/ForceDirectoryCheck')
r = req(args, "/rsFiles/ForceDirectoryCheck")
time.sleep(5)
looking_for = filename.split(os.path.sep)
hashlib
for next_component in looking_for:
r = req(args, '/rsFiles/requestDirDetails', { 'handle': handle })
found = [c for c in r.json()['details']['children']
if c['name'] == next_component]
if not found:
raise Exception('Error: could not find shared file in RS')
handle = found[0]['handle']
r = req(args, '/rsFiles/requestDirDetails', { 'handle': handle })
filehash = r.json()['details']['hash']
r = req(args, "/rsFiles/requestDirDetails", {"handle": handle})
found = [
c for c in r.json()["details"]["children"] if c["name"] == next_component
]
if not found:
raise Exception("Error: could not find shared file in RS")
handle = found[0]["handle"]
r = req(args, "/rsFiles/requestDirDetails", {"handle": handle})
filehash = r.json()["details"]["hash"]
return filehash
@@ -98,4 +100,3 @@ def get_file_link(args, hash_digest, fname=None, size=None):
if size is None:
size = os.stat(fsdb.get_file_path(hash_digest)).st_size
return "retroshare://file?name=%s&size=%d&hash=%s" % (fname, size, hash_digest)