# -*- coding: utf-8 -*-

# Copyright © 2012-2014 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import simplejson
import requests
import time
import traceback
import urlparse
import sys
import os
import optparse
import platform
import urllib
from distutils.version import LooseVersion

from ConfigParser import SafeConfigParser
import logging

__version__ = "0.2.4"

# Check that we have a recent enough version of requests.
# Older versions don't provide the 'json' attribute on responses.
assert(LooseVersion(requests.__version__) >= LooseVersion('0.12.1'))
# In newer versions, the 'json' attribute is a function, not a property.
requests_json_is_function = callable(requests.Response.json)

API_VERSTRING = "v1/"

def _default_client():
    return "ZulipPython/" + __version__

def generate_option_group(parser, prefix=''):
    group = optparse.OptionGroup(parser, 'Zulip API configuration')
    group.add_option('--%ssite' % (prefix,),
                     dest="zulip_site",
                     help="Zulip Enterprise server URI (if using Zulip Enterprise)",
                     default=None)
    group.add_option('--%sapi-key' % (prefix,),
                     dest="zulip_api_key",
                     action='store')
    group.add_option('--%suser' % (prefix,),
                     dest='zulip_email',
                     help='Email address of the calling bot or user.')
    group.add_option('--%sconfig-file' % (prefix,),
                     action='store',
                     dest="zulip_config_file",
                     help='Location of an ini file containing the\nabove information. (default ~/.zuliprc)')
    group.add_option('-v', '--verbose',
                     action='store_true',
                     help='Provide detailed output.')
    group.add_option('--%sclient' % (prefix,),
                     action='store',
                     default=None,
                     dest="zulip_client",
                     help=optparse.SUPPRESS_HELP)
    return group

def init_from_options(options, client=None):
    if options.zulip_client is not None:
        client = options.zulip_client
    elif client is None:
        client = _default_client()
    return Client(email=options.zulip_email, api_key=options.zulip_api_key,
                  config_file=options.zulip_config_file, verbose=options.verbose,
                  site=options.zulip_site, client=client)
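
# Illustrative sketch (not part of this module): a command-line script would
# typically combine these helpers with optparse roughly as follows:
#
#     parser = optparse.OptionParser()
#     parser.add_option_group(generate_option_group(parser))
#     (options, args) = parser.parse_args()
#     client = init_from_options(options)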

class Client(object):
    def __init__(self, email=None, api_key=None, config_file=None,
                 verbose=False, retry_on_errors=True,
                 site=None, client=None):
        if client is None:
            client = _default_client()
        if None in (api_key, email):
            if config_file is None:
                config_file = os.path.join(os.environ["HOME"], ".zuliprc")
            if (not os.path.exists(config_file) and
                os.path.exists(os.path.join(os.environ["HOME"], ".humbugrc"))):
                raise RuntimeError("The Zulip API configuration file is now ~/.zuliprc; please run:\n\n  mv ~/.humbugrc ~/.zuliprc\n")
            if not os.path.exists(config_file):
                raise RuntimeError("api_key or email not specified and %s does not exist"
                                   % (config_file,))
            config = SafeConfigParser()
            with file(config_file, 'r') as f:
                config.readfp(f, config_file)
            if api_key is None:
                api_key = config.get("api", "key")
            if email is None:
                email = config.get("api", "email")
            if site is None and config.has_option("api", "site"):
                site = config.get("api", "site")

        self.api_key = api_key
        self.email = email
        self.verbose = verbose
        if site is not None:
            if not site.startswith("http"):
                site = "https://" + site
            # Remove trailing "/"s from site to simplify the logic below for adding "/api".
            site = site.rstrip("/")
            self.base_url = site
        else:
            self.base_url = "https://api.zulip.com"

        if self.base_url != "https://api.zulip.com" and not self.base_url.endswith("/api"):
            self.base_url += "/api"
        self.base_url += "/"
        self.retry_on_errors = retry_on_errors
        self.client_name = client

    def get_user_agent(self):
        vendor = platform.system()
        vendor_version = platform.release()

        if vendor == "Linux":
            vendor, vendor_version, dummy = platform.linux_distribution()
        elif vendor == "Windows":
            vendor_version = platform.win32_ver()[1]
        elif vendor == "Darwin":
            vendor_version = platform.mac_ver()[0]

        return "{client_name} ({vendor}; {vendor_version})".format(
                client_name=self.client_name,
                vendor=vendor,
                vendor_version=vendor_version,
                )

    def do_api_query(self, orig_request, url, method="POST", longpolling=False):
        request = {}

        for (key, val) in orig_request.iteritems():
            if not (isinstance(val, str) or isinstance(val, unicode)):
                request[key] = simplejson.dumps(val)
            else:
                request[key] = val

        query_state = {
            'had_error_retry': False,
            'request': request,
            'failures': 0,
        }

        def error_retry(error_string):
            if not self.retry_on_errors or query_state["failures"] >= 10:
                return False
            if self.verbose:
                if not query_state["had_error_retry"]:
                    sys.stdout.write("zulip API(%s): connection error%s -- retrying." %
                                     (url.split(API_VERSTRING, 2)[0], error_string,))
                    query_state["had_error_retry"] = True
                else:
                    sys.stdout.write(".")
                sys.stdout.flush()
            query_state["request"]["dont_block"] = simplejson.dumps(True)
            time.sleep(1)
            query_state["failures"] += 1
            return True

        def end_error_retry(succeeded):
            if query_state["had_error_retry"] and self.verbose:
                if succeeded:
                    print "Success!"
                else:
                    print "Failed!"

        while True:
            try:
                if method == "GET":
                    kwarg = "params"
                else:
                    kwarg = "data"
                kwargs = {kwarg: query_state["request"]}
                res = requests.request(
                        method,
                        urlparse.urljoin(self.base_url, url),
                        auth=requests.auth.HTTPBasicAuth(self.email,
                                                         self.api_key),
                        verify=True, timeout=90,
                        headers={"User-agent": self.get_user_agent()},
                        **kwargs)

                # On 50x errors, try again after a short sleep
                if str(res.status_code).startswith('5'):
                    if error_retry(" (server %s)" % (res.status_code,)):
                        continue
                    # Otherwise fall through and process the python-requests error normally
            except (requests.exceptions.Timeout, requests.exceptions.SSLError) as e:
                # Timeouts surface as either a Timeout or an SSLError; let the
                # later exception handlers deal with any SSLError that is not
                # actually a timeout.
                if (isinstance(e, requests.exceptions.SSLError) and
                    str(e) != "The read operation timed out"):
                    raise
                if longpolling:
                    # When longpolling, we expect the timeout to fire,
                    # and the correct response is to just retry.
                    continue
                else:
                    end_error_retry(False)
                    return {'msg': "Connection error:\n%s" % traceback.format_exc(),
                            "result": "connection-error"}
            except requests.exceptions.ConnectionError:
                if error_retry(""):
                    continue
                end_error_retry(False)
                return {'msg': "Connection error:\n%s" % traceback.format_exc(),
                        "result": "connection-error"}
            except Exception:
                # We'll split this out into more cases as we encounter new bugs.
                return {'msg': "Unexpected error:\n%s" % traceback.format_exc(),
                        "result": "unexpected-error"}

            try:
                if requests_json_is_function:
                    json_result = res.json()
                else:
                    json_result = res.json
            except Exception:
                json_result = None

            if json_result is not None:
                end_error_retry(True)
                return json_result
            end_error_retry(False)
            return {'msg': "Unexpected error from the server", "result": "http-error",
                    "status_code": res.status_code}

    @classmethod
    def _register(cls, name, url=None, make_request=(lambda request={}: request),
                  method="POST", computed_url=None, **query_kwargs):
        if url is None:
            url = name
        def call(self, *args, **kwargs):
            request = make_request(*args, **kwargs)
            if computed_url is not None:
                req_url = computed_url(request)
            else:
                req_url = url
            return self.do_api_query(request, API_VERSTRING + req_url, method=method, **query_kwargs)
        call.func_name = name
        setattr(cls, name, call)
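
    # Note: _register is how this module defines the public API methods; for
    # example, the registration near the bottom of this file,
    #
    #     Client._register('get_profile', method='GET', url='users/me')
    #
    # attaches a get_profile() method to Client that performs a GET request
    # against <base_url> + "v1/users/me".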

    def call_on_each_event(self, callback, event_types=None, narrow=[]):
        def do_register():
            while True:
                if event_types is None:
                    res = self.register()
                else:
                    res = self.register(event_types=event_types, narrow=narrow)

                if 'error' in res.get('result'):
                    if self.verbose:
                        print "Server returned error:\n%s" % res['msg']
                    time.sleep(1)
                else:
                    return (res['queue_id'], res['last_event_id'])

        queue_id = None
        while True:
            if queue_id is None:
                (queue_id, last_event_id) = do_register()

            res = self.get_events(queue_id=queue_id, last_event_id=last_event_id)
            if 'error' in res.get('result'):
                if res["result"] == "http-error":
                    if self.verbose:
                        print "HTTP error fetching events -- probably a server restart"
                elif res["result"] == "connection-error":
                    if self.verbose:
                        print "Connection error fetching events -- probably server is temporarily down?"
                else:
                    if self.verbose:
                        print "Server returned error:\n%s" % res["msg"]
                    if res["msg"].startswith("Bad event queue id:"):
                        # Our event queue went away, probably because
                        # we were asleep or the server restarted
                        # abnormally.  We may have missed some
                        # events while the network was down or
                        # something, but there's not really anything
                        # we can do about it other than resuming
                        # getting new ones.
                        #
                        # Reset queue_id to register a new event queue.
                        queue_id = None
                # TODO: Make this back off once it's more reliable
                time.sleep(1)
                continue

            for event in res['events']:
                last_event_id = max(last_event_id, int(event['id']))
                callback(event)

    def call_on_each_message(self, callback):
        def event_callback(event):
            if event['type'] == 'message':
                callback(event['message'])

        self.call_on_each_event(event_callback, ['message'])
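
    # Illustrative sketch (not part of this module): a minimal consumer that
    # prints each incoming message for the configured user, assuming a valid
    # ~/.zuliprc:
    #
    #     client = Client()
    #     client.call_on_each_message(lambda msg: sys.stdout.write(msg['content'] + "\n"))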

def _mk_subs(streams, **kwargs):
    result = kwargs
    result['subscriptions'] = streams
    return result

def _mk_rm_subs(streams):
    return {'delete': streams}

def _mk_deregister(queue_id):
    return {'queue_id': queue_id}

def _mk_events(event_types=None, narrow=[]):
    if event_types is None:
        return dict()
    return dict(event_types=event_types, narrow=narrow)

def _kwargs_to_dict(**kwargs):
    return kwargs

class ZulipStream(object):
    """
    A Zulip stream-like object
    """

    def __init__(self, type, to, subject, **kwargs):
        self.client = Client(**kwargs)
        self.type = type
        self.to = to
        self.subject = subject

    def write(self, content):
        message = {"type": self.type,
                   "to": self.to,
                   "subject": self.subject,
                   "content": content}
        self.client.send_message(message)

    def flush(self):
        pass
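
# Illustrative sketch (not part of this module): ZulipStream can stand in for a
# writable, file-like object; each write() is delivered as a Zulip message,
# e.g. (hypothetical stream/subject names and credentials):
#
#     stream = ZulipStream("stream", "errors", "my-script",
#                          email="error-bot@example.com", api_key="0123456789abcdef")
#     stream.write("something went wrong")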

Client._register('send_message', url='messages', make_request=(lambda request: request))
Client._register('update_message', method='PATCH', url='messages', make_request=(lambda request: request))
Client._register('get_messages', method='GET', url='messages/latest', longpolling=True)
Client._register('get_events', url='events', method='GET', longpolling=True, make_request=(lambda **kwargs: kwargs))
Client._register('register', make_request=_mk_events)
Client._register('deregister', url="events", method="DELETE", make_request=_mk_deregister)
Client._register('get_profile', method='GET', url='users/me')
Client._register('get_streams', method='GET', url='streams', make_request=_kwargs_to_dict)
Client._register('get_members', method='GET', url='users')
Client._register('list_subscriptions', method='GET', url='users/me/subscriptions')
Client._register('add_subscriptions', url='users/me/subscriptions', make_request=_mk_subs)
Client._register('remove_subscriptions', method='PATCH', url='users/me/subscriptions', make_request=_mk_rm_subs)
Client._register('get_subscribers', method='GET',
                 computed_url=lambda request: 'streams/%s/members' % (urllib.quote(request['stream'], safe=''),),
                 make_request=_kwargs_to_dict)
Client._register('render_message', method='GET', url='messages/render')
Client._register('create_user', method='POST', url='users')
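
# Illustrative sketch (not part of this module): once registered above, the
# generated methods are called like ordinary instance methods, e.g.
# (hypothetical credentials and message):
#
#     client = Client(email="my-bot@example.com", api_key="0123456789abcdef")
#     client.send_message({"type": "stream", "to": "test", "subject": "greetings",
#                          "content": "hello, world"})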
|