From 797f0d4462390e054682ac9b1891aaa4695d1f58 Mon Sep 17 00:00:00 2001 From: Arkaitz Zubiaga Date: Thu, 21 Jan 2016 14:42:35 +0000 Subject: [PATCH] first version --- LICENSE | 201 +++++++++++++ README | 38 +++ get.thread.php | 151 ++++++++++ index.php | 17 ++ retrieve.tweet.list.py | 29 ++ retrieve.tweet.py | 27 ++ tweepy/LICENSE | 20 ++ tweepy/__init__.py | 27 ++ tweepy/__init__.pyc | Bin 0 -> 1249 bytes tweepy/api.py | 751 +++++++++++++++++++++++++++++++++++++++++++++++++ tweepy/api.pyc | Bin 0 -> 14873 bytes tweepy/auth.py | 156 ++++++++++ tweepy/auth.pyc | Bin 0 -> 6048 bytes tweepy/binder.py | 213 ++++++++++++++ tweepy/binder.pyc | Bin 0 -> 5664 bytes tweepy/cache.py | 424 ++++++++++++++++++++++++++++ tweepy/cache.pyc | Bin 0 -> 15887 bytes tweepy/cursor.py | 171 +++++++++++ tweepy/cursor.pyc | Bin 0 -> 7164 bytes tweepy/error.py | 15 + tweepy/error.pyc | Bin 0 -> 794 bytes tweepy/models.py | 433 ++++++++++++++++++++++++++++ tweepy/models.pyc | Bin 0 -> 18511 bytes tweepy/oauth.py | 655 ++++++++++++++++++++++++++++++++++++++++++ tweepy/oauth.pyc | Bin 0 -> 25801 bytes tweepy/parsers.py | 97 +++++++ tweepy/parsers.pyc | Bin 0 -> 4009 bytes tweepy/streaming.py | 319 +++++++++++++++++++++ tweepy/streaming.pyc | Bin 0 -> 11699 bytes tweepy/utils.py | 60 ++++ tweepy/utils.pyc | Bin 0 -> 2258 bytes twitter.ini | 5 + 32 files changed, 3809 insertions(+) create mode 100644 LICENSE create mode 100644 README create mode 100755 get.thread.php create mode 100755 index.php create mode 100755 retrieve.tweet.list.py create mode 100755 retrieve.tweet.py create mode 100644 tweepy/LICENSE create mode 100755 tweepy/__init__.py create mode 100644 tweepy/__init__.pyc create mode 100755 tweepy/api.py create mode 100644 tweepy/api.pyc create mode 100755 tweepy/auth.py create mode 100644 tweepy/auth.pyc create mode 100755 tweepy/binder.py create mode 100644 tweepy/binder.pyc create mode 100755 tweepy/cache.py create mode 100644 tweepy/cache.pyc create mode 100755 tweepy/cursor.py 
create mode 100644 tweepy/cursor.pyc create mode 100755 tweepy/error.py create mode 100644 tweepy/error.pyc create mode 100755 tweepy/models.py create mode 100644 tweepy/models.pyc create mode 100755 tweepy/oauth.py create mode 100644 tweepy/oauth.pyc create mode 100755 tweepy/parsers.py create mode 100644 tweepy/parsers.pyc create mode 100755 tweepy/streaming.py create mode 100644 tweepy/streaming.pyc create mode 100755 tweepy/utils.py create mode 100644 tweepy/utils.pyc create mode 100644 twitter.ini diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README b/README new file mode 100644 index 0000000..8c77d91 --- /dev/null +++ b/README @@ -0,0 +1,38 @@ +The PHEME conversation collection script allows the user to collect the set of tweets replying to a specific tweet, forming a conversation or a thread. The user needs to specify a single source tweet, providing its URL or tweet ID, and the tool collects the replies and stories them in the 'data/tweet-id' directory. + +NOTE: Since retrieval of replying tweets has been discontinued in Twitter API v1.1 (it used to be available through the 'related_results/show' endpoint in v1.0), this script scrapes the replies from the HTML of the source tweet. + +----- +SETUP +----- + +The script is developed using PHP (it was initially intended to run as a web service) and Python (for the back-end to access the Twitter API using the Tweepy library). It is ready to run on a system with PHP installed (i.e., the 'php5-cli' package on UNIX systems, or the equivalent PHP CLI for Windows). However, it does need to define a few settings in order to be able to access Twitter.com and the API. There are basically two steps: + +1. 
Create a cookie file to enable web access to replies: + +This can be done using Mozilla Firefox and the add-on Cookies Export/Import, which can be installed from: +https://addons.mozilla.org/en-US/firefox/addon/cookies-exportimport/ + +It is recommended to delete all existing cookies on Firefox before proceeding. Then, you should log in on Twitter.com with some user account. After logging in, click on 'Tool -> Export cookies', and save the file with the name 'cookies.txt'. This file should be copied into the main directory of this script. + +2. Add Twitter API credentials to twitter.ini: + +This is the Twitter API configuration file. You will see four lines there where you can specify the Twitter API credentials. + +------------- +HOW TO RUN IT +------------- + +Once you have the ID of the tweet that you want to get the conversation for, you can run the following command: + +php get.thread.php tweet-id + +Where tweet-id is the ID of the tweet that you are interested in. If successful, the script will output the number of replies that have been collected, and a new directory will be created in the 'data' folder. + +--------- +REFERENCE +--------- + +This conversation collection script was used for the following paper: + +Arkaitz Zubiaga, Geraldine Wong Sak Hoi, Maria Liakata, Rob Procter, Peter Tolmie. Analysing How People Orient to and Spread Rumours in Social Media by Looking at Conversational Threads. arXiv. 2015. 
diff --git a/get.thread.php b/get.thread.php new file mode 100755 index 0000000..959ec2d --- /dev/null +++ b/get.thread.php @@ -0,0 +1,151 @@ +", $content); + $content = str_replace("\\/", "/", $content); + $content = str_replace("\\\"", "\"", $content); + + if (preg_match_all("| $reptweet) { + $reptweettokens = explode("/", $reptweet); + $repusername = $reptweettokens[1]; + $reptweetid = $reptweettokens[count($reptweettokens) - 1]; + + if (!in_array($reptweetid, $replyingids)) { + array_push($replyingids, $reptweetid); + get_replying_ids($reptweetid, $repusername); + } + } + } + + $maxposition = ""; + if (preg_match("|data-min-position=\"([^\"]*)\"|U", $content, $mp) || preg_match("|\"min_position\":\"([^\"]*)\"|U", $content, $mp)) { + $maxposition = $mp[1]; + } + } while ($maxposition != ""); +} + +function add_to_structure ($tweetid, $inreplyto) { + global $structure; + + foreach ($structure as $id => $substructure) { + if ($id == $inreplyto) { + $structure[$id] = $tweetid; + } + else { + add_to_structure($tweetid, $inreplyto, $structure[$id]); + } + } +} + +function collect_replying_tweets ($tweetid, $username) { + global $argv, $replyingids; + $replycount = 0; + + @mkdir("data/" . $tweetid . "/reactions/"); + @chmod("data/" . $tweetid . "/reactions/", 0777); + get_replying_ids($tweetid, $username); + + $idsstr = ""; + $idcount = 0; + $allcount = 0; + foreach ($replyingids as $replyingid) { + $allcount++; + $idsstr .= $replyingid . ","; + $idcount++; + if ($idcount == 100 || $allcount == count($replyingids)) { + $tweets = @shell_exec("python retrieve.tweet.list.py " . substr($idsstr, 0, strlen($idsstr) - 1)); + $tweets = explode("\n", $tweets); + foreach ($tweets as $tweet) { + $tweetobj = @json_decode($tweet); + if (isset($tweetobj->id_str)) { + file_put_contents("data/" . $tweetid . "/reactions/" . $tweetobj->id_str . ".json", $tweet); + $replycount++; + } + } + + $idsstr = ""; + $idcount = 0; + } + } + + if (isset($argv[1])) { + echo $tweetid . 
" - source tweet and " . $replycount . " replies collected.\n"; + } +} + +function create_structure($tweetid) { + global $structure; + + $parents = array(); + $dir = dir("data/" . $tweetid . "/reactions/"); + while (($file = $dir->read()) !== false) { + if ($file != "." && $file != "..") { + $tweet = json_decode(file_get_contents("data/" . $tweetid . "/reactions/" . $file)); + + $inreplyto = $tweet->in_reply_to_status_id_str; + $id = $tweet->id; + + if (!isset($parents[$inreplyto])) { + $parents[$inreplyto] = array(); + } + array_push($parents[$inreplyto], $id); + } + } + + foreach ($structure as $sid => $substructure) { + if (isset($parents[$sid])) { + foreach ($parents[$sid] as $cid) { + $structure[$sid][$cid] = array(); + } + } + } + + file_put_contents("data/" . $tweetid . "/structure.json", json_encode($structure)); + chmod("data/" . $tweetid . "/structure.json", 0777); +} + +if (!isset($argv[1])) { + exit(0); +} +$tweetid = $argv[1]; + +if (strstr($tweetid, "/")) { + $tweetid = explode("/", $tweetid); + $tweetid = $tweetid[count($tweetid) - 1]; +} + +$replyingids = array(); +$structure = array($tweetid => array()); + +$sourcetweet = @shell_exec("python retrieve.tweet.py " . $tweetid); +$sourcetweetobj = json_decode($sourcetweet); +if (isset($sourcetweetobj->id_str)) { + $username = $sourcetweetobj->user->screen_name; + + @mkdir("data/" . $tweetid); + @chmod("data/" . $tweetid, 0766); + @mkdir("data/" . $tweetid . "/source-tweets/"); + @chmod("data/" . $tweetid . "/source-tweets/", 0766); + file_put_contents("data/" . $tweetid . "/source-tweets/" . $tweetid . ".json", $sourcetweet); + + collect_replying_tweets($tweetid, $username); + + create_structure($tweetid); +} +?> diff --git a/index.php b/index.php new file mode 100755 index 0000000..8b73a2c --- /dev/null +++ b/index.php @@ -0,0 +1,17 @@ + + + + + PHEME - Collection of Twitter Threads + + +

Collection of Twitter Threads/Conversations

+

This web application enables to collect the whole conversation in the form of replies, given the source tweet as the input. The output is rendered in an HTML file that visualises the conversation in a forum-like visualisation where replies are indented.

+ +

To start, please input the URL of a tweet for which you would like to see the whole conversation:

+ +
+ +
+ + diff --git a/retrieve.tweet.list.py b/retrieve.tweet.list.py new file mode 100755 index 0000000..1222f68 --- /dev/null +++ b/retrieve.tweet.list.py @@ -0,0 +1,29 @@ +import json +import tweepy +import sys +import pprint +import os +import ConfigParser +import time + +tweetid = sys.argv[1] + +config = ConfigParser.ConfigParser() +config.read('twitter.ini') + +consumer_key = config.get('Twitter', 'consumer_key') +consumer_secret = config.get('Twitter', 'consumer_secret') +access_key = config.get('Twitter', 'access_key') +access_secret = config.get('Twitter', 'access_secret') + +auth = tweepy.OAuthHandler(consumer_key, consumer_secret) +auth.set_access_token(access_key, access_secret) +api = tweepy.API(auth) + +try: + tweets = api._statuses_lookup(id=tweetid) + for tweet in tweets: + print json.dumps(tweet.json) + print tweet +except: + sys.exit() diff --git a/retrieve.tweet.py b/retrieve.tweet.py new file mode 100755 index 0000000..cc7b428 --- /dev/null +++ b/retrieve.tweet.py @@ -0,0 +1,27 @@ +import json +import tweepy +import sys +import pprint +import os +import ConfigParser +import time + +tweetid = sys.argv[1] + +config = ConfigParser.ConfigParser() +config.read('twitter.ini') + +consumer_key = config.get('Twitter', 'consumer_key') +consumer_secret = config.get('Twitter', 'consumer_secret') +access_key = config.get('Twitter', 'access_key') +access_secret = config.get('Twitter', 'access_secret') + +auth = tweepy.OAuthHandler(consumer_key, consumer_secret) +auth.set_access_token(access_key, access_secret) +api = tweepy.API(auth) + +try: + tweet = api.get_status(tweetid) + print json.dumps(tweet.json) +except: + sys.exit() diff --git a/tweepy/LICENSE b/tweepy/LICENSE new file mode 100644 index 0000000..545a75c --- /dev/null +++ b/tweepy/LICENSE @@ -0,0 +1,20 @@ +MIT License +Copyright (c) 2013-2014 Joshua Roesslein + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the 
"Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/tweepy/__init__.py b/tweepy/__init__.py new file mode 100755 index 0000000..05dbfc3 --- /dev/null +++ b/tweepy/__init__.py @@ -0,0 +1,27 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. 
+ +""" +Tweepy Twitter API library +""" +__version__ = '2.2' +__author__ = 'Joshua Roesslein' +__license__ = 'MIT' + +from tweepy.models import Status, User, DirectMessage, Friendship, SavedSearch, SearchResults, ModelFactory, Category +from tweepy.error import TweepError +from tweepy.api import API +from tweepy.cache import Cache, MemoryCache, FileCache +from tweepy.auth import OAuthHandler +from tweepy.streaming import Stream, StreamListener +from tweepy.cursor import Cursor + +# Global, unauthenticated instance of API +api = API() + +def debug(enable=True, level=1): + + import httplib + httplib.HTTPConnection.debuglevel = level + diff --git a/tweepy/__init__.pyc b/tweepy/__init__.pyc new file mode 100644 index 0000000000000000000000000000000000000000..14cff659b3023ec4078a7aa6b3bd564bbdb323ae GIT binary patch literal 1249 zcmbVL&2ke*5dK!OWlNTAVTdtcz(<{|l#eNj3MJwKDh6sSILu{hz1w9C%GJ+Fuke*c1!$HTbcU(} z-9gob?xI>lwGO)mw+_7y*MsiCZ9s3pZ9;Ft^`ZN41Ly(V7W5X}5PFDAkUiLKxE<)7 z*4}{Kg&RSST6+`r7Tg~69^7r{+i-WF@3d!q*dO5bq4!&R0DBki0Qvy#9`wD|y9Ik6 z?g8`zWHiDboW>#OA*vy&ZG7fT9--R7`FK|{J!CPMT@x*Cj%L>;M)F6`-o8At=E4{L zW)P$%C&wpY#QUoz%$LQHYA6ItrjBxW_Ht(a;%B_cM(nXTk40RDxW?+cKt67>^4xeT z;~4{sONs;boO(lb73O9Uw**vNQFTs*FXsu+s*1wW#t?`6#aUC4JuS-E_?swX(<0I( z8{>?PbZ1KB7w;S2`XoNiW-NPLPmpp>iSLXYL#KtdKNI zcjSsTJ5Pm+J8YjvQGYcdQq7;FFyxcz(uc+yu}i@Tz3fw7sp33UJYE*~y@O8~K6UUR zL(UV%cP6ISZAnwy8;qLAxZsu~))%wc+i6qRTu;-~DesCdmY0^U$hN6U5L6czmQvQ~ zMD4M2%&QgQN7g3u#?hqk9}5$In>?rRF*b`yoO4=Ln8Y*|lRP)IiFtm!xJkDqvb&59 z(qVLl+3WFRR-y!&=T%eYc`9+9Uy%lSZwOB#l?n%tW{H77L?gW))p zbU4b`ubd%u+timz{v4G~{u6hZnyzjMKtg8T^b5@Vl9ZI|-`Xeb|205ze!?YMT6!u) X2nQ^BS$aD^Wg~tMdF^MTZol(47^)NX literal 0 HcmV?d00001 diff --git a/tweepy/api.py b/tweepy/api.py new file mode 100755 index 0000000..b6d3cae --- /dev/null +++ b/tweepy/api.py @@ -0,0 +1,751 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. 
+ +import os +import mimetypes + +from tweepy.binder import bind_api +from tweepy.error import TweepError +from tweepy.parsers import ModelParser +from tweepy.utils import list_to_csv + + +class API(object): + """Twitter API""" + + def __init__(self, auth_handler=None, + host='api.twitter.com', search_host='search.twitter.com', + cache=None, secure=True, api_root='/1.1', search_root='', + retry_count=0, retry_delay=0, retry_errors=None, timeout=60, + parser=None, compression=False): + self.auth = auth_handler + self.host = host + self.search_host = search_host + self.api_root = api_root + self.search_root = search_root + self.cache = cache + self.secure = secure + self.compression = compression + self.retry_count = retry_count + self.retry_delay = retry_delay + self.retry_errors = retry_errors + self.timeout = timeout + self.parser = parser or ModelParser() + + """ statuses/home_timeline """ + home_timeline = bind_api( + path = '/statuses/home_timeline.json', + payload_type = 'status', payload_list = True, + allowed_param = ['since_id', 'max_id', 'count'], + require_auth = True + ) + + """ statuses/user_timeline """ + user_timeline = bind_api( + path = '/statuses/user_timeline.json', + payload_type = 'status', payload_list = True, + allowed_param = ['id', 'user_id', 'screen_name', 'since_id', + 'max_id', 'count', 'include_rts'] + ) + + """ statuses/mentions """ + mentions_timeline = bind_api( + path = '/statuses/mentions_timeline.json', + payload_type = 'status', payload_list = True, + allowed_param = ['since_id', 'max_id', 'count'], + require_auth = True + ) + + """/related_results/show/:id.format""" + related_results = bind_api( + path = '/related_results/show/{id}.json', + payload_type = 'relation', payload_list = True, + allowed_param = ['id'], + require_auth = False + ) + + """ statuses/retweets_of_me """ + retweets_of_me = bind_api( + path = '/statuses/retweets_of_me.json', + payload_type = 'status', payload_list = True, + allowed_param = ['since_id', 
'max_id', 'count'], + require_auth = True + ) + + """ statuses/lookup """ + def statuses_lookup(self, id, include_entities=None, trim_user=None, map=None): + return self._statuses_lookup(list_to_csv(id), include_entities, trim_user, map) + + _statuses_lookup = bind_api( + path = '/statuses/lookup.json', + payload_type = 'status', payload_list = True, + allowed_param = ['id', 'include_entities', 'trim_user', 'map'], + require_auth = True + ) + + """ statuses/show """ + get_status = bind_api( + path = '/statuses/show.json', + payload_type = 'status', + allowed_param = ['id'] + ) + + """ statuses/update """ + update_status = bind_api( + path = '/statuses/update.json', + method = 'POST', + payload_type = 'status', + allowed_param = ['status', 'in_reply_to_status_id', 'lat', 'long', 'source', 'place_id'], + require_auth = True + ) + + """ statuses/update_with_media """ + def update_with_media(self, filename, *args, **kwargs): + headers, post_data = API._pack_image(filename, 3072, form_field='media[]') + kwargs.update({'headers': headers, 'post_data': post_data}) + + return bind_api( + path='/statuses/update_with_media.json', + method = 'POST', + payload_type='status', + allowed_param = [ + 'status', 'possibly_sensitive', 'in_reply_to_status_id', 'lat', 'long', + 'place_id', 'display_coordinates' + ], + require_auth=True + )(self, *args, **kwargs) + + """ statuses/destroy """ + destroy_status = bind_api( + path = '/statuses/destroy/{id}.json', + method = 'POST', + payload_type = 'status', + allowed_param = ['id'], + require_auth = True + ) + + """ statuses/retweet """ + retweet = bind_api( + path = '/statuses/retweet/{id}.json', + method = 'POST', + payload_type = 'status', + allowed_param = ['id'], + require_auth = True + ) + + """ statuses/retweets """ + retweets = bind_api( + path = '/statuses/retweets/{id}.json', + payload_type = 'status', payload_list = True, + allowed_param = ['id', 'count'], + require_auth = True + ) + + retweeters = bind_api( + path = 
'/statuses/retweeters/ids.json', + payload_type = 'ids', + allowed_param = ['id', 'cursor', 'stringify_ids'] + ) + + """ users/show """ + get_user = bind_api( + path = '/users/show.json', + payload_type = 'user', + allowed_param = ['id', 'user_id', 'screen_name'] + ) + + ''' statuses/oembed ''' + get_oembed = bind_api( + path = '/statuses/oembed.json', + payload_type = 'json', + allowed_param = ['id', 'url', 'maxwidth', 'hide_media', 'omit_script', 'align', 'related', 'lang'] + ) + + """ Perform bulk look up of users from user ID or screenname """ + def lookup_users(self, user_ids=None, screen_names=None): + return self._lookup_users(list_to_csv(user_ids), list_to_csv(screen_names)) + + _lookup_users = bind_api( + path = '/users/lookup.json', + payload_type = 'user', payload_list = True, + allowed_param = ['user_id', 'screen_name'], + ) + + """ Get the authenticated user """ + def me(self): + return self.get_user(screen_name=self.auth.get_username()) + + """ users/search """ + search_users = bind_api( + path = '/users/search.json', + payload_type = 'user', payload_list = True, + require_auth = True, + allowed_param = ['q', 'per_page', 'page'] + ) + + """ users/suggestions/:slug """ + suggested_users = bind_api( + path = '/users/suggestions/{slug}.json', + payload_type = 'user', payload_list = True, + require_auth = True, + allowed_param = ['slug', 'lang'] + ) + + """ users/suggestions """ + suggested_categories = bind_api( + path = '/users/suggestions.json', + payload_type = 'category', payload_list = True, + allowed_param = ['lang'], + require_auth = True + ) + + """ users/suggestions/:slug/members """ + suggested_users_tweets = bind_api( + path = '/users/suggestions/{slug}/members.json', + payload_type = 'status', payload_list = True, + allowed_param = ['slug'], + require_auth = True + ) + + """ direct_messages """ + direct_messages = bind_api( + path = '/direct_messages.json', + payload_type = 'direct_message', payload_list = True, + allowed_param = ['since_id', 
'max_id', 'count'], + require_auth = True + ) + + """ direct_messages/show """ + get_direct_message = bind_api( + path = '/direct_messages/show/{id}.json', + payload_type = 'direct_message', + allowed_param = ['id'], + require_auth = True + ) + + """ direct_messages/sent """ + sent_direct_messages = bind_api( + path = '/direct_messages/sent.json', + payload_type = 'direct_message', payload_list = True, + allowed_param = ['since_id', 'max_id', 'count', 'page'], + require_auth = True + ) + + """ direct_messages/new """ + send_direct_message = bind_api( + path = '/direct_messages/new.json', + method = 'POST', + payload_type = 'direct_message', + allowed_param = ['user', 'screen_name', 'user_id', 'text'], + require_auth = True + ) + + """ direct_messages/destroy """ + destroy_direct_message = bind_api( + path = '/direct_messages/destroy.json', + method = 'DELETE', + payload_type = 'direct_message', + allowed_param = ['id'], + require_auth = True + ) + + """ friendships/create """ + create_friendship = bind_api( + path = '/friendships/create.json', + method = 'POST', + payload_type = 'user', + allowed_param = ['id', 'user_id', 'screen_name', 'follow'], + require_auth = True + ) + + """ friendships/destroy """ + destroy_friendship = bind_api( + path = '/friendships/destroy.json', + method = 'DELETE', + payload_type = 'user', + allowed_param = ['id', 'user_id', 'screen_name'], + require_auth = True + ) + + """ friendships/show """ + show_friendship = bind_api( + path = '/friendships/show.json', + payload_type = 'friendship', + allowed_param = ['source_id', 'source_screen_name', + 'target_id', 'target_screen_name'] + ) + + """ Perform bulk look up of friendships from user ID or screenname """ + def lookup_friendships(self, user_ids=None, screen_names=None): + return self._lookup_friendships(list_to_csv(user_ids), list_to_csv(screen_names)) + + _lookup_friendships = bind_api( + path = '/friendships/lookup.json', + payload_type = 'relationship', payload_list = True, + 
allowed_param = ['user_id', 'screen_name'], + require_auth = True + ) + + + """ friends/ids """ + friends_ids = bind_api( + path = '/friends/ids.json', + payload_type = 'ids', + allowed_param = ['id', 'user_id', 'screen_name', 'cursor'] + ) + + """ friends/list """ + friends = bind_api( + path = '/friends/list.json', + payload_type = 'user', payload_list = True, + allowed_param = ['id', 'user_id', 'screen_name', 'cursor'] + ) + + """ friendships/incoming """ + friendships_incoming = bind_api( + path = '/friendships/incoming.json', + payload_type = 'ids', + allowed_param = ['cursor'] + ) + + """ friendships/outgoing""" + friendships_outgoing = bind_api( + path = '/friendships/outgoing.json', + payload_type = 'ids', + allowed_param = ['cursor'] + ) + + """ followers/ids """ + followers_ids = bind_api( + path = '/followers/ids.json', + payload_type = 'ids', + allowed_param = ['id', 'user_id', 'screen_name', 'cursor'] + ) + + """ followers/list """ + followers = bind_api( + path = '/followers/list.json', + payload_type = 'user', payload_list = True, + allowed_param = ['id', 'user_id', 'screen_name', 'cursor', 'count', + 'skip_status', 'include_user_entities'] + ) + + """ account/verify_credentials """ + def verify_credentials(self, **kargs): + try: + return bind_api( + path = '/account/verify_credentials.json', + payload_type = 'user', + require_auth = True, + allowed_param = ['include_entities', 'skip_status'], + )(self, **kargs) + except TweepError, e: + if e.response and e.response.status == 401: + return False + raise + + """ account/rate_limit_status """ + rate_limit_status = bind_api( + path = '/application/rate_limit_status.json', + payload_type = 'json', + allowed_param = ['resources'], + use_cache = False + ) + + """ account/update_delivery_device """ + set_delivery_device = bind_api( + path = '/account/update_delivery_device.json', + method = 'POST', + allowed_param = ['device'], + payload_type = 'user', + require_auth = True + ) + + """ 
account/update_profile_colors """ + update_profile_colors = bind_api( + path = '/account/update_profile_colors.json', + method = 'POST', + payload_type = 'user', + allowed_param = ['profile_background_color', 'profile_text_color', + 'profile_link_color', 'profile_sidebar_fill_color', + 'profile_sidebar_border_color'], + require_auth = True + ) + + """ account/update_profile_image """ + def update_profile_image(self, filename): + headers, post_data = API._pack_image(filename, 700) + return bind_api( + path = '/account/update_profile_image.json', + method = 'POST', + payload_type = 'user', + require_auth = True + )(self, post_data=post_data, headers=headers) + + """ account/update_profile_background_image """ + def update_profile_background_image(self, filename, *args, **kargs): + headers, post_data = API._pack_image(filename, 800) + bind_api( + path = '/account/update_profile_background_image.json', + method = 'POST', + payload_type = 'user', + allowed_param = ['tile'], + require_auth = True + )(self, post_data=post_data, headers=headers) + + """ account/update_profile_banner """ + def update_profile_banner(self, filename, *args, **kargs): + headers, post_data = API._pack_image(filename, 700, form_field="banner") + bind_api( + path = '/account/update_profile_banner.json', + method = 'POST', + allowed_param = ['width', 'height', 'offset_left', 'offset_right'], + require_auth = True + )(self, post_data=post_data, headers=headers) + + + """ account/update_profile """ + update_profile = bind_api( + path = '/account/update_profile.json', + method = 'POST', + payload_type = 'user', + allowed_param = ['name', 'url', 'location', 'description'], + require_auth = True + ) + + """ favorites """ + favorites = bind_api( + path = '/favorites/list.json', + payload_type = 'status', payload_list = True, + allowed_param = ['screen_name', 'user_id', 'max_id', 'count', 'since_id', 'max_id'] + ) + + """ favorites/create """ + create_favorite = bind_api( + path = 
'/favorites/create.json', + method = 'POST', + payload_type = 'status', + allowed_param = ['id'], + require_auth = True + ) + + """ favorites/destroy """ + destroy_favorite = bind_api( + path = '/favorites/destroy.json', + method = 'POST', + payload_type = 'status', + allowed_param = ['id'], + require_auth = True + ) + + """ blocks/create """ + create_block = bind_api( + path = '/blocks/create.json', + method = 'POST', + payload_type = 'user', + allowed_param = ['id', 'user_id', 'screen_name'], + require_auth = True + ) + + """ blocks/destroy """ + destroy_block = bind_api( + path = '/blocks/destroy.json', + method = 'DELETE', + payload_type = 'user', + allowed_param = ['id', 'user_id', 'screen_name'], + require_auth = True + ) + + """ blocks/blocking """ + blocks = bind_api( + path = '/blocks/list.json', + payload_type = 'user', payload_list = True, + allowed_param = ['cursor'], + require_auth = True + ) + + """ blocks/blocking/ids """ + blocks_ids = bind_api( + path = '/blocks/ids.json', + payload_type = 'json', + require_auth = True + ) + + """ report_spam """ + report_spam = bind_api( + path = '/users/report_spam.json', + method = 'POST', + payload_type = 'user', + allowed_param = ['user_id', 'screen_name'], + require_auth = True + ) + + """ saved_searches """ + saved_searches = bind_api( + path = '/saved_searches/list.json', + payload_type = 'saved_search', payload_list = True, + require_auth = True + ) + + """ saved_searches/show """ + get_saved_search = bind_api( + path = '/saved_searches/show/{id}.json', + payload_type = 'saved_search', + allowed_param = ['id'], + require_auth = True + ) + + """ saved_searches/create """ + create_saved_search = bind_api( + path = '/saved_searches/create.json', + method = 'POST', + payload_type = 'saved_search', + allowed_param = ['query'], + require_auth = True + ) + + """ saved_searches/destroy """ + destroy_saved_search = bind_api( + path = '/saved_searches/destroy/{id}.json', + method = 'POST', + payload_type = 
'saved_search', + allowed_param = ['id'], + require_auth = True + ) + + create_list = bind_api( + path = '/lists/create.json', + method = 'POST', + payload_type = 'list', + allowed_param = ['name', 'mode', 'description'], + require_auth = True + ) + + destroy_list = bind_api( + path = '/lists/destroy.json', + method = 'POST', + payload_type = 'list', + allowed_param = ['owner_screen_name', 'owner_id', 'list_id', 'slug'], + require_auth = True + ) + + update_list = bind_api( + path = '/lists/update.json', + method = 'POST', + payload_type = 'list', + allowed_param = ['list_id', 'slug', 'name', 'mode', 'description', 'owner_screen_name', 'owner_id'], + require_auth = True + ) + + lists_all = bind_api( + path = '/lists/list.json', + payload_type = 'list', payload_list = True, + allowed_param = ['screen_name', 'user_id'], + require_auth = True + ) + + lists_memberships = bind_api( + path = '/lists/memberships.json', + payload_type = 'list', payload_list = True, + allowed_param = ['screen_name', 'user_id', 'filter_to_owned_lists', 'cursor'], + require_auth = True + ) + + lists_subscriptions = bind_api( + path = '/lists/subscriptions.json', + payload_type = 'list', payload_list = True, + allowed_param = ['screen_name', 'user_id', 'cursor'], + require_auth = True + ) + + list_timeline = bind_api( + path = '/lists/statuses.json', + payload_type = 'status', payload_list = True, + allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id', 'since_id', 'max_id', 'count', 'include_rts'] + ) + + get_list = bind_api( + path = '/lists/show.json', + payload_type = 'list', + allowed_param = ['owner_screen_name', 'owner_id', 'slug', 'list_id'] + ) + + add_list_member = bind_api( + path = '/lists/members/create.json', + method = 'POST', + payload_type = 'list', + allowed_param = ['screen_name', 'user_id', 'owner_screen_name', 'owner_id', 'slug', 'list_id'], + require_auth = True + ) + + remove_list_member = bind_api( + path = '/lists/members/destroy.json', + method = 'POST', 
+ payload_type = 'list', + allowed_param = ['screen_name', 'user_id', 'owner_screen_name', 'owner_id', 'slug', 'list_id'], + require_auth = True + ) + + list_members = bind_api( + path = '/lists/members.json', + payload_type = 'user', payload_list = True, + allowed_param = ['owner_screen_name', 'slug', 'list_id', 'owner_id', 'cursor'] + ) + + show_list_member = bind_api( + path = '/lists/members/show.json', + payload_type = 'user', + allowed_param = ['list_id', 'slug', 'user_id', 'screen_name', 'owner_screen_name', 'owner_id'] + ) + + subscribe_list = bind_api( + path = '/lists/subscribers/create.json', + method = 'POST', + payload_type = 'list', + allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id'], + require_auth = True + ) + + unsubscribe_list = bind_api( + path = '/lists/subscribers/destroy.json', + method = 'POST', + payload_type = 'list', + allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id'], + require_auth = True + ) + + list_subscribers = bind_api( + path = '/lists/subscribers.json', + payload_type = 'user', payload_list = True, + allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id', 'cursor'] + ) + + show_list_subscriber = bind_api( + path = '/lists/subscribers/show.json', + payload_type = 'user', + allowed_param = ['owner_screen_name', 'slug', 'screen_name', 'owner_id', 'list_id', 'user_id'] + ) + + """ trends/available """ + trends_available = bind_api( + path = '/trends/available.json', + payload_type = 'json' + ) + + trends_place = bind_api( + path = '/trends/place.json', + payload_type = 'json', + allowed_param = ['id', 'exclude'] + ) + + trends_closest = bind_api( + path = '/trends/closest.json', + payload_type = 'json', + allowed_param = ['lat', 'long'] + ) + + """ search """ + search = bind_api( + path = '/search/tweets.json', + payload_type = 'search_results', + allowed_param = ['q', 'lang', 'locale', 'since_id', 'geocode', 'max_id', 'since', 'until', 'result_type', 'count', 'include_entities', 
'from', 'to', 'source'] + ) + + """ trends/daily """ + trends_daily = bind_api( + path = '/trends/daily.json', + payload_type = 'json', + allowed_param = ['date', 'exclude'] + ) + + """ trends/weekly """ + trends_weekly = bind_api( + path = '/trends/weekly.json', + payload_type = 'json', + allowed_param = ['date', 'exclude'] + ) + + """ geo/reverse_geocode """ + reverse_geocode = bind_api( + path = '/geo/reverse_geocode.json', + payload_type = 'place', payload_list = True, + allowed_param = ['lat', 'long', 'accuracy', 'granularity', 'max_results'] + ) + + """ geo/id """ + geo_id = bind_api( + path = '/geo/id/{id}.json', + payload_type = 'place', + allowed_param = ['id'] + ) + + """ geo/search """ + geo_search = bind_api( + path = '/geo/search.json', + payload_type = 'place', payload_list = True, + allowed_param = ['lat', 'long', 'query', 'ip', 'granularity', 'accuracy', 'max_results', 'contained_within'] + ) + + """ geo/similar_places """ + geo_similar_places = bind_api( + path = '/geo/similar_places.json', + payload_type = 'place', payload_list = True, + allowed_param = ['lat', 'long', 'name', 'contained_within'] + ) + + """ help/languages.json """ + supported_languages = bind_api( + path = '/help/languages.json', + payload_type = 'json', + require_auth = True + ) + + """ help/configuration """ + configuration = bind_api( + path = '/help/configuration.json', + payload_type = 'json', + require_auth = True + ) + + """ Internal use only """ + @staticmethod + def _pack_image(filename, max_size, form_field="image"): + """Pack image from file into multipart-formdata post body""" + # image must be less than 700kb in size + try: + if os.path.getsize(filename) > (max_size * 1024): + raise TweepError('File is too big, must be less than 700kb.') + except os.error: + raise TweepError('Unable to access file') + + # image must be gif, jpeg, or png + file_type = mimetypes.guess_type(filename) + if file_type is None: + raise TweepError('Could not determine file type') + file_type 
= file_type[0] + if file_type not in ['image/gif', 'image/jpeg', 'image/png']: + raise TweepError('Invalid file type for image: %s' % file_type) + + # build the mulitpart-formdata body + fp = open(filename, 'rb') + BOUNDARY = 'Tw3ePy' + body = [] + body.append('--' + BOUNDARY) + body.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (form_field, filename)) + body.append('Content-Type: %s' % file_type) + body.append('') + body.append(fp.read()) + body.append('--' + BOUNDARY + '--') + body.append('') + fp.close() + body = '\r\n'.join(body) + + # build headers + headers = { + 'Content-Type': 'multipart/form-data; boundary=Tw3ePy', + 'Content-Length': str(len(body)) + } + + return headers, body + diff --git a/tweepy/api.pyc b/tweepy/api.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6030059653b843ee36749b4d8e6d3ba486bea48c GIT binary patch literal 14873 zcmd5@X`36zaqij0F1gDkm*OE&q7IM}rPV=#x~SVaNXj~w6i8jr0b$Xz*x3az(9D3; zN~GfRL=I}mi!g;d_Lfgz|##}Mx?ZysHb#7@prj7m^|1B>C zc+B{|67_AfHHZ{456Jn8VK_LQBypm!9e}+W+pv4Km84+-*p1SxnZ?a^dLiS|o;~|w zzSMnqe-7XD)?LPgSfnhhF=gz(k{2rvVTDVxXlLYuu2~|4cm?x2O#^G$X6$xj=Zu{< z7VB6r9JSNfUB)gNyW4a?;W`4Zn3=|QkH2REf3LCojNLyPXs}{q4 zndCy)<%d-L5Qs|{yJDsri@x`ev4@Sly?_Jp-eK$!WA8NfE@PKW2kOYl?l$&rT2uoN zRmwdR6!PV(I`(+Xd%p;CugAR4^OGwY%Y>F;FgYjtJn~UNJ|^}6w3@NUX%AfHevfm% z;5^`Q#0DJB^&aOz!FkBzJZ$U7VHyZmD5#3E5|LHQmER9zEjIqxe`<$*~rg6~YpS%kGDdCN?%rtKH$fpJQ z`GQ%3e>F?h%Pk)BtAbhFR;S9Cw|dMM1oOoL%`LaaZ65z6kMC{ItFO|idYqR%PA)W0 zlM*PJ^dUdwm4YFG84!nNv8f^LSIa{V`ytCR}zG>claUo z0`c;Yl3zOF@y`hx=Y{jvjeWz|H;sLZ#A&qHcWPa4i*?=QM>S;BI|bo;>+({aX)Jl< zcLh1$6oQGZ;ikCTqrE3+;5ipQc2Ehq(zwSXzh9sUQh{)LukUSc)q9`s{h-i0KE-t7 zs7Gsk9zXB^cE#AX_t;pSl?_$%!$TSVkO+2FY`CLNxY9W8an`(kVFQui;7ox8a>sJN z$6WWAiU9l#W4p%ojO`m6n+{H>d=w}Qgc-Pz59I-DB=YdECzwtU%#4Mf85(<`Y+5J$ zkc+aS;@(s{ms3xVKlo?5mq+}Nj|#J3;H0TP>M=LJ2=g(I`EddBb1eLEkN=6_>vsDb z{3ksAr3t-@fsMrbq{qDMRqP}`-FV7(eQknCvEIN|9Q#}`Q?crf^7XnRwy7Y8p;2(ANws43mD5ih8Sc?$j^J^Z*Prke}%_% 
ziMOx%A-{T+A;2##{{@f#9l@cy?DBFZzMM<|l&IRALzQO>#vi z{E|oe^$A(&1}qMlEePsmKkzsHmjhq%1AkK(!>-9CtSf?!uqeoZQDvp^s>lDX$Hzic zk7D*tiF6Lrjb)GgTN7Lr#8uMB89(H=zx0r^e#rMcOJhq=rQ}O^&Ex-$$5&lI5fHK> z?_>PQCDc9g_XYVO0H+$~Jn(lNP~DyVJ;sV0b>1WVegWYKgIkC2x<~i}LHL2}xMvtp z**}yt;g7_G{@B<*G4_XyLK$!W$k;zM2#-6vWR^EPkv}WU^51>m*$MH1CjU9tY1mzrp=7_g*6Q1CnAAD6PL}IjN1deWb8J{xsB>_gimmTv za~&J%w7wI~6297`_u{Fx7WcwhE7@p8*~hh0VY-pUgIczhge{xaGS(&5%!>!t)(|}qBLt|!!%4~#wHiujr!rS^%Sz>0tT(kZrrlX zY;)k`ECWh?EQMrTMXTG5FNU@WakYBFSQ_=)VKcHK#a`c!)B7DX%)DmW%j~;2D?bhbkp02l)xe~ zun*fAcC)mWuEiH?pG5Yuii4UJ6pp{8L=_eUon&boybGc$J!vjv6^(hlUVHFnbPQWRmaCZWJJixpx-63q; zUaHlxVh696$70tRyN5X(&MUBpq6Mr$E#TWMP_k;c92snij1FTpPI}GNDD2vl=H3f! z)OzPVG5s~Tfe^lmdLG26LqcG!`t^7lvvw6_6Q{@;>=Fc2Eq4R6eh-@)*c@O(uaVIo zfnXY~K(uMtPhn3N!a7}2{XRBF+0@Wv)TND5h?K51PHfbN*3$Z|fUA+xz02-|KWJ|> zqh70{E=0Miqg&5_^T{w=qB_K`MN_L$H==UQY_ z-3~NxyofMx+@*0MeixiX{Z6#HiA|8EbO|-?eDz>Dl5!MSd9%i0ZzZ%N6!H6EJm3zd3i*(7r+Ygab^wrrwf0Hc9d_K2NnF0B z^fQyVYIobw*M{%t#K~rQ=az_$Xst(cO>{HpsbLX2v@;lGnvy4_$WUvnLk{>c*w_It zfm3Y)2dAn~O{vL=?tZ9en203{Kgu*R7^mXAg|bJzo;v-?>GP*ky58Dq5`}%6u0?~i z2E#2KeycHKDxJHCS0tFy^OvaPN2{JBIej@XQaKQLUebo$TVcm&_yA7}J9J2yz7>$J zZAbI)@H@J&sgq zL?*aAWdeBc^=Xr|>B{l24jOZ2V~P-*5$8TNrXQHMEHgpQysyLK?Y|PMZ-onh(2%&FtW-G2j-^mq*6o_E3E`^Znx{YZ{DJN z1@R@$A7My>cOEPh#Yucqx`|@&j`-nSgzflD8(1f=+YzCQHRq;`m>s1Ym@>>0p*`-` zCH9y)x5RvMIr?`Tk!qWV@m}+`>7?q}mzY)rRKFW&g_wF|rwmjjWL+ZDb!i^v6TWJv z+QHpkf0WHnBfe!6yA`vWv_qbcCODAW;cSp+L|naOL~r0>3!zFbbTy$umb_`5HzHl8 zH%Nh)C@n~i8%*){9Osr8&n-t)_#c%plZvZJxL|8vAZ$rsDBTRUt1jY8h@NM$cmNgs zV|)dZKOH47=onMN_<9iT8j0A8`j~@ea*fbUibk4vC~8FQx|pL^sbslkldmdrye)|3in&mnL*bfgor1JLy)pllI`R*08qr7wSrBqmtV)@a@Lx@8#@9 zP$$_G3ryU=wbj;zIEj!}*YZQ&1$%i%Az;)fdo-pLBT6u_g6TF&L8i{*;H3c)=(vv2 z?2=qMs836a(LJg%NJ^XOpw*MrYUG->E}*WaiA8ZSnyeK%HK`awFW2E*o?RFOiUldi z@&4fuXI^@UG8`l3)>ufp86k>%%Vek~u3IAe9)1F*C^6+SXm= zP}i5zQdW2K;mXhAd+8p!p_or6jN7n6F>fiW%M~{;=DE@6vac{@uDXgCfz$7IvYSKcU6)1KCG)iG6KcNVIS@VWpZG^)+>t z0SJ;YRa00|ajN{+!tS8fMfqu%mj-l{bb#P$)B(#3w?_+-C_}Pax@Rs?V$83>+i%v~ 
z`HKP$VjCRY$b*nB?c1oBHe+tUfWQH8t0xOuoie23!H5s0OMpeP<9bmAU&kCG1wQ~$9tb1Y zD-w5ufWCw10WOl)frz_pEP0j%b^O=@b@3CZWM-IBI?#tA`H9Q8X~bvNfU|Q;5`dh5 zLe3)psEVGdH1aBwm1@+_;%X1~HW6;0vZJh?vnpO?eZ0C7+s%l@$CP3E^MapNvpBA< zM4kIEj5uMniz-qz!*yQuk>kfVR*p%D>GeK$K9H-ha3XLzsq&nRhh1Ck$5|EIGE9)y zhsp~t%OXr<(m6E3X$|Gsl%brytq;OZp>xph$Q9I!{R=3f+7Xp1)}c#%ym};+?VPNL zXE=ZH!SL*+)G&@7O_?{G#128ieDoBm@-Zv-asP3S?;WKeo~SbBc38b#6# zCoiWwuUEyZipv(zI?6HF*pNl0H)*(+ke`xEz95c|zD;;JPDZJ5T8K_b2evv8wCB#e zzI^K0`rAU52B}FQ5dLo$zjY>&|A7fo?g>+w4sH(i2D^jZ)3~odSz#XSUOe*-n7QtjE;vK%+WE%AmJ?I*(i! zI!OHqVv&T41w1B?XGc{Q>}L>)?geLD{gvV@dOuQUiLCwtn`QRW%-vzlHKwwPRBPZk zpcKY1YXoxzZr;v{A1fm_b+JNctaL2J6_#Sl@Jz_Vgva2iT+HF>&W?4+v|c1*v>3o@ z1l=6fNM&L_pa7=E7$y}^>+b_re-Diu{tlV3u9%N|7nVnaQv7B?i=h)8z6>hb%?q7E zPqvrT#Hp)QPD$=3s%1_L5Kp-PCNnu|k}%t@Bzu@oYCwj^g_j+$V}f;F_ulPY1WoYKgow2&fL*6$`Q!D%;s?otOQ zaswE8i;;Uo0T96nMBb?i3DTSR-G^vF-wKI}aG}srA|etXeB>|~ZE+EVifCI_(;3|D z@X|-jF$sI|1(roI(t#=5!h0&x5c@1ZRcbz!&^Z`X4*PjxFapZB9FE;BTa6Y!)+1AC z)ZENd1d*EqYH&n|_lprHf4hHDrpdr}bTVH~D7lBQS%U*JKt zn+FsRthtIlEd+bq(+Gy&G8JHZrWJFKgJQh&5_$lzIiblnKd$)9S?7eptea`{_fTcg z(O#_h8GY{p=q)-fJY7&KamIN@vtlJVhoDk;_M?bhg!>f}zL@iZS!R^-emB4V9j)81 z-}f(K^xQ5TGn$KaMO~HlD5rDtb35jCc!=j_Avl=JbVOmaHsY7K&E+t{9?gXnc*!}c zBdjt_z@2V{e*O%;^5=u@d8A2^wAWnTd&rI0pO1ufOgt|H2OR8R-dCK>KNs+14W5!( zjJJ*1;6b-SwG|J?Xa`qaEd+<;eIf9=^wOF`D7QqnR{t)W@1aqrAV2#%#{Xv#D;t0y&j{EFld OoE3_iLS!n@>Hh#=X?aKh literal 0 HcmV?d00001 diff --git a/tweepy/auth.py b/tweepy/auth.py new file mode 100755 index 0000000..29df5b2 --- /dev/null +++ b/tweepy/auth.py @@ -0,0 +1,156 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. 
+ +from urllib2 import Request, urlopen +import base64 + +from tweepy import oauth +from tweepy.error import TweepError +from tweepy.api import API + + +class AuthHandler(object): + + def apply_auth(self, url, method, headers, parameters): + """Apply authentication headers to request""" + raise NotImplementedError + + def get_username(self): + """Return the username of the authenticated user""" + raise NotImplementedError + + +class OAuthHandler(AuthHandler): + """OAuth authentication handler""" + + OAUTH_HOST = 'api.twitter.com' + OAUTH_ROOT = '/oauth/' + + def __init__(self, consumer_key, consumer_secret, callback=None, secure=True): + if type(consumer_key) == unicode: + consumer_key = bytes(consumer_key) + + if type(consumer_secret) == unicode: + consumer_secret = bytes(consumer_secret) + + self._consumer = oauth.OAuthConsumer(consumer_key, consumer_secret) + self._sigmethod = oauth.OAuthSignatureMethod_HMAC_SHA1() + self.request_token = None + self.access_token = None + self.callback = callback + self.username = None + self.secure = secure + + def _get_oauth_url(self, endpoint, secure=True): + if self.secure or secure: + prefix = 'https://' + else: + prefix = 'http://' + + return prefix + self.OAUTH_HOST + self.OAUTH_ROOT + endpoint + + def apply_auth(self, url, method, headers, parameters): + request = oauth.OAuthRequest.from_consumer_and_token( + self._consumer, http_url=url, http_method=method, + token=self.access_token, parameters=parameters + ) + request.sign_request(self._sigmethod, self._consumer, self.access_token) + headers.update(request.to_header()) + + def _get_request_token(self): + try: + url = self._get_oauth_url('request_token') + request = oauth.OAuthRequest.from_consumer_and_token( + self._consumer, http_url=url, callback=self.callback + ) + request.sign_request(self._sigmethod, self._consumer, None) + resp = urlopen(Request(url, headers=request.to_header())) + return oauth.OAuthToken.from_string(resp.read()) + except Exception, e: + raise 
TweepError(e) + + def set_request_token(self, key, secret): + self.request_token = oauth.OAuthToken(key, secret) + + def set_access_token(self, key, secret): + self.access_token = oauth.OAuthToken(key, secret) + + def get_authorization_url(self, signin_with_twitter=False): + """Get the authorization URL to redirect the user""" + try: + # get the request token + self.request_token = self._get_request_token() + + # build auth request and return as url + if signin_with_twitter: + url = self._get_oauth_url('authenticate') + else: + url = self._get_oauth_url('authorize') + request = oauth.OAuthRequest.from_token_and_callback( + token=self.request_token, http_url=url + ) + + return request.to_url() + except Exception, e: + raise TweepError(e) + + def get_access_token(self, verifier=None): + """ + After user has authorized the request token, get access token + with user supplied verifier. + """ + try: + url = self._get_oauth_url('access_token') + + # build request + request = oauth.OAuthRequest.from_consumer_and_token( + self._consumer, + token=self.request_token, http_url=url, + verifier=str(verifier) + ) + request.sign_request(self._sigmethod, self._consumer, self.request_token) + + # send request + resp = urlopen(Request(url, headers=request.to_header())) + self.access_token = oauth.OAuthToken.from_string(resp.read()) + return self.access_token + except Exception, e: + raise TweepError(e) + + def get_xauth_access_token(self, username, password): + """ + Get an access token from an username and password combination. + In order to get this working you need to create an app at + http://twitter.com/apps, after that send a mail to api@twitter.com + and request activation of xAuth for it. 
+ """ + try: + url = self._get_oauth_url('access_token', secure=True) # must use HTTPS + request = oauth.OAuthRequest.from_consumer_and_token( + oauth_consumer=self._consumer, + http_method='POST', http_url=url, + parameters = { + 'x_auth_mode': 'client_auth', + 'x_auth_username': username, + 'x_auth_password': password + } + ) + request.sign_request(self._sigmethod, self._consumer, None) + + resp = urlopen(Request(url, data=request.to_postdata())) + self.access_token = oauth.OAuthToken.from_string(resp.read()) + return self.access_token + except Exception, e: + raise TweepError(e) + + def get_username(self): + if self.username is None: + api = API(self) + user = api.verify_credentials() + if user: + self.username = user.screen_name + else: + raise TweepError("Unable to get username, invalid oauth token!") + return self.username + diff --git a/tweepy/auth.pyc b/tweepy/auth.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ce77a5f1db8efcc43a0aa36a02a3fa57930d5658 GIT binary patch literal 6048 zcmb_gO?Mne6|L@`8EK@E$9AwCe_-020~vzxkPytr31Ha?*bqxOEtwB(`n0A?R!cM8 zldc-ulOt~;FW9qS2^%)-_y^grW5tFa!Vd`dzUrPCfpc<fL+as|xR*80J9x||NK*Xw)KICBr*=Fw^3{&7Jl6VZ7^s~+$<#S(L7h>s$ZufnDV< zTHa6cbg0YO`Z>;BJmv}#t+;Hg7gNQ`u@}q)D%z6S4EQ}h>~s&0;TNq^pP-Y6$#cr) z4J7O1@o*Y)W;(aoAhB7IhxqmL%RQU+Io(_1sf?6x0WoX3=x(?$-`uW`Te+P{sZ6{?OV)^03=rYHw!eUR# z3!9=-dFyr|;=06U5siMD1$5B=v9@vjY3^^egdVrkE+dI!&MJ;&xp6!y(#a6Th@GP~ zB=Z^kfBP8Gd>qK|n=QZHupE1H{Us=*Z3TtL$mkk{4uWJIh}cl+n$j(m2I>WhOOkJ> z7hvkLN}GbzwgTS>*_DcKj%%#xD7~gq&<*X+aHw=e@?Dj#N|&=L?MUVv;eH15FxTqo z{tuEJi43ukjI&$zX=Z`CTZ3X`XuQ6VvOnVsmnV4nBQoI2k*AJ5Wj%H1tMZ1jepU0_ zQ@Ly4FEqF-4W#C|FAV~9&|sG%RH848*;L@iL5l*z;u1;B40gn2R@6}-HBcW{(@{ql z1Z}Vv3gjiQH)P?$IXfL|CpmdGC{itAvpcogL}Y`dWyO~Firh>_y0mT7#DltYy?(!y zJ z;dD@hn-XAy5h?K80C5`6o9d{k4v?a8tLnl$-UiCUCLSzFi)9i9RHTpJ61;2(nz(VC zoVZOXR2UVPL`X(-s69g7(P1WSwYR>6S9@hKnlU1VXgKsn7ubysSAYoy`4X2G@DOYTwjSmrn)hfHd4q_J=fTqEE0ZMj&EzxMz z8OY<%fehVI2WzCk!I{c-TffP_rW#=M;fgB%qP*t~40F~Yfssk>so&1c8(qevqi-@1 
zzH|3_=UDs(6H+ny7Lzxbh@qt2w^^idqSu*FEF(NBB7Q3#!Le3PXyzGKKce1s2`l9RZO=fyF@3ZYQvSPA#zL96NY*RRx!`0+&w# z7o36ZIr>t3y%yUR;6maX$c9=8tAFcz2$AK$6YeKQLTZ6PDrl;Q}lW>$| zLym=W=ADK1v!OWMnmozCW>1_R}}2umYmDB*HLdAO{~U&)n#99FkrC5hEE?=v;1_oMYzF+$obe;93yCkDkH>%azq1FrFc*d6w+puLb$*dKK9C zBI3bc4oY#H76aUO-C*t4nFzps$y~@p;tw9{B8f?U$lQ;R$VJG{u{s_7jCFUJJY>Qv zk@*rN_sUUXL}BMlXm&5hBTU0@1>dh#bPkV6@XDXE{5w zoawc=ws?tx*g+1t=3+Po@&gheKOjJ0AP5ZPkb4d}d_ni>-dcM{MSF~$mm~y@9*JL{|Zft|3-3=jf1|C4Kt<(vJs5?p=^ZO4kq8$->Ryz^0Xz;I%V;mtz7KN zc;Gf)iEB5SoWSIrGPQZl-UshIcy3>nLWMN&IfhUDOEeI`iEJAA(6CJd`I*QgkstqD zK7c2HW7B# z+D*u`>e$}6`{o_%w#)RunjxsXK>d3(7Wy;X3uak{<#-@$1~HGzBiPbQ1WF(lY6oR% z%oEBM?g)yP1w0MjfRyH#XL7u1W=xqHQ;v-((_&|J6%5)O8+1lHbCMp@&b*|v+F6ij zt$hN5JLMH|4tv*V$Nd-7_mvy3=DiZ0_k3%HJH*n`8Q< zWNIL8GNZyBlZV(dB&PutzEpjti;wF`C#0Cw;fc|ZJafdcp+xgi{Zmnh{T8HXD(tjB z+G|1C7IpE`c(0|=kUZM&&d|=+0l2j`b-Y@iMSBA(w}s0(iOb5OHL^r{N%h_@YZs@9 zOB_9}vaTA&opMmPBV%}JG8|cv^OindyD2Q#wn=KMx}Ij=xqkiX%1W&h7@)6MdIM*) zPl6-j8A>7yB8??9GeN-be%aUA)v~Y=p%~4vnP;=WW)Y3!@ctmnM~Jfy0z`zWiaf$q zRhCZU6!wmFK!uE#0fWS8Z94;8S-XH9V5?!rBXb*4t2FGiiAWvQHs6d+jYmnfRXawo z9roJjh&J^#^j5aZo?S_*og{PbuiUitjw|~sZo7ig>J`UuIb6A)70}?d{!pcj+Q`vo_UJDP?GZ?b}DBKNhG$E%C{QAqXOvrUC zy{8!GomcB8(XN+etCtkRmIfh(M5`Y3`*~*5OZBthxHHII*3WJ0y(Ayly0uwWhs>?3 z^&z{aJB?9?o2jqZV$icy;%syV8%B)u5rMC~Z^Ddwn*BVk35v|wUfrfeq86J=Y@R}+ zXwT9;{RVPi8yMlS;1ki+U&p6Dk48+>ESp&~8HDD7Ic`n{Ix=%+Dd_Mp;`sn7pJTB1 z8S^Y+EA0_@ocxYimGMdQ7EH1a;~=mGvJ*&^s;$7?kew+GxpG3fu;h`xhXKG=ydlGL ztR1=tqq+)PEpyXQ?iH5+s~LsWrwXfMg0v2XF%XQreX!^?FCt6eRlHr4cZA zyVaTo`e0wZ7U`u@pIj}ESY4q2Bn*mlga@|`d{kL-Kew*}6m8}=4LC?$Oo=fp+Rp(2 zjeeW(U1YzAw$>!RI3iTwzqAKKv& z1TlQrh9AX{@{L8*tLc`MKyUMS7Z6oq?a3{R!d8U^x4K8AkleQvF8D3v9bA-E+$S#AwwSDE15)QH!8GuPyYBm%&K0 zSoNe-e=sP2JqHTk>zEsUi!rx*L8(}c8r56pJnIRvUWC>HL#7s^`u-%ebr~*uNnQ3_ zVD|CGa8F5!YG~@qPJ3CZ*Mf(W(!HX4UDj}MH@LfdHz-iI=me@!#uQMg=d05Fn({u) zKt$a_t1m;*yA5*>3n&uM)*dac{vBYw->4Dp7%4vvP}RZ;MIeEh#r*K%Xi@hW={~Ex z&+#Ti&$OC;e3`!oNNcFk{dT7@TOj!0ZV#aoE!0N^C*;;G_Msy5pO+NRB}ESO%gc1b 
zVO!h_(!D0>ytoy4h)NPESj;}uG;_H8-flM>AHNwM=n`l<&?PwDKC~k4@FMTS4Kf`Kc{oSi|#PSK{1R5>d z=>IP3*>d2Eq^(d>~)O-M9lg4$@Pud#^r zlQ$Cs`&cX3N{S_}a#7n#R>XK zbeRnc*@%@$)IsC5rZIEn)4l=Moxa{ zPFhlbq}2&4PE^Op`Y3YnBLOl9U)x;ae;o5dbEPil<|*| 0 and (time.time() - entry[0]) >= timeout + + def store(self, key, value): + self.lock.acquire() + self._entries[key] = (time.time(), value) + self.lock.release() + + def get(self, key, timeout=None): + self.lock.acquire() + try: + # check to see if we have this key + entry = self._entries.get(key) + if not entry: + # no hit, return nothing + return None + + # use provided timeout in arguments if provided + # otherwise use the one provided during init. + if timeout is None: + timeout = self.timeout + + # make sure entry is not expired + if self._is_expired(entry, timeout): + # entry expired, delete and return nothing + del self._entries[key] + return None + + # entry found and not expired, return it + return entry[1] + finally: + self.lock.release() + + def count(self): + return len(self._entries) + + def cleanup(self): + self.lock.acquire() + try: + for k, v in self._entries.items(): + if self._is_expired(v, self.timeout): + del self._entries[k] + finally: + self.lock.release() + + def flush(self): + self.lock.acquire() + self._entries.clear() + self.lock.release() + + +class FileCache(Cache): + """File-based cache""" + + # locks used to make cache thread-safe + cache_locks = {} + + def __init__(self, cache_dir, timeout=60): + Cache.__init__(self, timeout) + if os.path.exists(cache_dir) is False: + os.mkdir(cache_dir) + self.cache_dir = cache_dir + if cache_dir in FileCache.cache_locks: + self.lock = FileCache.cache_locks[cache_dir] + else: + self.lock = threading.Lock() + FileCache.cache_locks[cache_dir] = self.lock + + if os.name == 'posix': + self._lock_file = self._lock_file_posix + self._unlock_file = self._unlock_file_posix + elif os.name == 'nt': + self._lock_file = self._lock_file_win32 + self._unlock_file = self._unlock_file_win32 + else: 
+ print 'Warning! FileCache locking not supported on this system!' + self._lock_file = self._lock_file_dummy + self._unlock_file = self._unlock_file_dummy + + def _get_path(self, key): + md5 = hashlib.md5() + md5.update(key) + return os.path.join(self.cache_dir, md5.hexdigest()) + + def _lock_file_dummy(self, path, exclusive=True): + return None + + def _unlock_file_dummy(self, lock): + return + + def _lock_file_posix(self, path, exclusive=True): + lock_path = path + '.lock' + if exclusive is True: + f_lock = open(lock_path, 'w') + fcntl.lockf(f_lock, fcntl.LOCK_EX) + else: + f_lock = open(lock_path, 'r') + fcntl.lockf(f_lock, fcntl.LOCK_SH) + if os.path.exists(lock_path) is False: + f_lock.close() + return None + return f_lock + + def _unlock_file_posix(self, lock): + lock.close() + + def _lock_file_win32(self, path, exclusive=True): + # TODO: implement + return None + + def _unlock_file_win32(self, lock): + # TODO: implement + return + + def _delete_file(self, path): + os.remove(path) + if os.path.exists(path + '.lock'): + os.remove(path + '.lock') + + def store(self, key, value): + path = self._get_path(key) + self.lock.acquire() + try: + # acquire lock and open file + f_lock = self._lock_file(path) + datafile = open(path, 'wb') + + # write data + pickle.dump((time.time(), value), datafile) + + # close and unlock file + datafile.close() + self._unlock_file(f_lock) + finally: + self.lock.release() + + def get(self, key, timeout=None): + return self._get(self._get_path(key), timeout) + + def _get(self, path, timeout): + if os.path.exists(path) is False: + # no record + return None + self.lock.acquire() + try: + # acquire lock and open + f_lock = self._lock_file(path, False) + datafile = open(path, 'rb') + + # read pickled object + created_time, value = pickle.load(datafile) + datafile.close() + + # check if value is expired + if timeout is None: + timeout = self.timeout + if timeout > 0 and (time.time() - created_time) >= timeout: + # expired! 
delete from cache + value = None + self._delete_file(path) + + # unlock and return result + self._unlock_file(f_lock) + return value + finally: + self.lock.release() + + def count(self): + c = 0 + for entry in os.listdir(self.cache_dir): + if entry.endswith('.lock'): + continue + c += 1 + return c + + def cleanup(self): + for entry in os.listdir(self.cache_dir): + if entry.endswith('.lock'): + continue + self._get(os.path.join(self.cache_dir, entry), None) + + def flush(self): + for entry in os.listdir(self.cache_dir): + if entry.endswith('.lock'): + continue + self._delete_file(os.path.join(self.cache_dir, entry)) + +class MemCacheCache(Cache): + """Cache interface""" + + def __init__(self, client, timeout=60): + """Initialize the cache + client: The memcache client + timeout: number of seconds to keep a cached entry + """ + self.client = client + self.timeout = timeout + + def store(self, key, value): + """Add new record to cache + key: entry key + value: data of entry + """ + self.client.set(key, value, time=self.timeout) + + def get(self, key, timeout=None): + """Get cached entry if exists and not expired + key: which entry to get + timeout: override timeout with this value [optional]. DOES NOT WORK HERE + """ + return self.client.get(key) + + def count(self): + """Get count of entries currently stored in cache. RETURN 0""" + raise NotImplementedError + + def cleanup(self): + """Delete any expired entries in cache. NO-OP""" + raise NotImplementedError + + def flush(self): + """Delete all cached entries. 
NO-OP""" + raise NotImplementedError + +class RedisCache(Cache): + '''Cache running in a redis server''' + + def __init__(self, client, timeout=60, keys_container = 'tweepy:keys', pre_identifier = 'tweepy:'): + Cache.__init__(self, timeout) + self.client = client + self.keys_container = keys_container + self.pre_identifier = pre_identifier + + def _is_expired(self, entry, timeout): + # Returns true if the entry has expired + return timeout > 0 and (time.time() - entry[0]) >= timeout + + def store(self, key, value): + '''Store the key, value pair in our redis server''' + # Prepend tweepy to our key, this makes it easier to identify tweepy keys in our redis server + key = self.pre_identifier + key + # Get a pipe (to execute several redis commands in one step) + pipe = self.client.pipeline() + # Set our values in a redis hash (similar to python dict) + pipe.set(key, pickle.dumps((time.time(), value))) + # Set the expiration + pipe.expire(key, self.timeout) + # Add the key to a set containing all the keys + pipe.sadd(self.keys_container, key) + # Execute the instructions in the redis server + pipe.execute() + + def get(self, key, timeout=None): + '''Given a key, returns an element from the redis table''' + key = self.pre_identifier + key + # Check to see if we have this key + unpickled_entry = self.client.get(key) + if not unpickled_entry: + # No hit, return nothing + return None + + entry = pickle.loads(unpickled_entry) + # Use provided timeout in arguments if provided + # otherwise use the one provided during init. 
+ if timeout is None: + timeout = self.timeout + + # Make sure entry is not expired + if self._is_expired(entry, timeout): + # entry expired, delete and return nothing + self.delete_entry(key) + return None + # entry found and not expired, return it + return entry[1] + + def count(self): + '''Note: This is not very efficient, since it retreives all the keys from the redis + server to know how many keys we have''' + return len(self.client.smembers(self.keys_container)) + + def delete_entry(self, key): + '''Delete an object from the redis table''' + pipe = self.client.pipeline() + pipe.srem(self.keys_container, key) + pipe.delete(key) + pipe.execute() + + def cleanup(self): + '''Cleanup all the expired keys''' + keys = self.client.smembers(self.keys_container) + for key in keys: + entry = self.client.get(key) + if entry: + entry = pickle.loads(entry) + if self._is_expired(entry, self.timeout): + self.delete_entry(key) + + def flush(self): + '''Delete all entries from the cache''' + keys = self.client.smembers(self.keys_container) + for key in keys: + self.delete_entry(key) + + +class MongodbCache(Cache): + """A simple pickle-based MongoDB cache sytem.""" + + def __init__(self, db, timeout=3600, collection='tweepy_cache'): + """Should receive a "database" cursor from pymongo.""" + Cache.__init__(self, timeout) + self.timeout = timeout + self.col = db[collection] + self.col.create_index('created', expireAfterSeconds=timeout) + + def store(self, key, value): + from bson.binary import Binary + + now = datetime.datetime.utcnow() + blob = Binary(pickle.dumps(value)) + + self.col.insert({'created': now, '_id': key, 'value': blob}) + + def get(self, key, timeout=None): + if timeout: + raise NotImplementedError + obj = self.col.find_one({'_id': key}) + if obj: + return pickle.loads(obj['value']) + + def count(self): + return self.col.find({}).count() + + def delete_entry(self, key): + return self.col.remove({'_id': key}) + + def cleanup(self): + """MongoDB will automatically 
clear expired keys.""" + pass + + def flush(self): + self.col.drop() + self.col.create_index('created', expireAfterSeconds=self.timeout) diff --git a/tweepy/cache.pyc b/tweepy/cache.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1681ad1d2199453a14f70efec72f49350af268ab GIT binary patch literal 15887 zcmc&*%a0t#eXgFFeav!~%ZIoUBPwn^y|$^PBgvFaQ<2Ce^@yyY(kq$rO2T+{x>wt~ zvopi)9xhiAfxs)sIV8x*fDZu@7)SyH&MCJbIpmN_kaI2pa><{NLx5b8@B4n;J+lwO zqGs)o>|%9wb#>M6`PHv}Q~j^G8Sh{J;)iXQ|Czx5ui;9sdd`LT=edq^t7YADt6tGw zajTW0UUjQgw|lC%t+~|+cf0$9t3{J;YsMu_=XqW}-UJ_ZJ@>(||CC#u+SmV2NBU3m z;r{-0w^~R4)8+Vo$NuaSR$MgW)-lPd``o!b=YF($!i6;#olxsU**fdO$)a_tY@KuA zbkSNbTTi-hrf5A;w$8h7wrHIzTR{ZRtt_4d732TKpGHQaF1OknQAy?sTCS7KzlS0M zfkjl36P1el9AvKMD_E^@Mqy2jlP=T(KoCqG>s!SmO&;_JRu=0Va~_30=f~YFO4eKL zD87oeHosb2%j`La;xo^E=3G;(URmVh^g|SHb>l2dGWKii1>Hm5)1kNnBv?Wi}% zuK3--_F9zqy>&m0+P!X=`dQE4ilV;Xve!aC>SoDq`F3LwvgNWV7B+}Jo7MR3sIwmM z@ggoO(&tgTw9(s+F13=aR-Ao$=|+@pWxf8TY$J(UVR|Xs!RK}_X_8C*-E10fG@CJ& z&}{msr(Bk+wW}z3@dF~j3@Vv(KLORrGp+t3v|S5BzZ>oN309PZT+m?xY(=|QY-L#O zaCIcn-BxE1UGc+K)*@YwT>e>nrqRpZ+U|FvZS;u3n@Q437CFCw6UkV^$PyooI2KJP zglX1GqUTrt38@so&~?R=o_j{WyujlFcN9Xtc8tw>NT1 zfs&s@*}gudAocD>NfL(;IFky0C(bq?eQ|08^*`+Ov$)r7bv`Z!Fj)#HIz!3@T5wM9$;#|xeQI{9UaQwKe&L;HyO-=P zAMKvtn!bd4f^UK+nkak|oRs+{I4QVZxF@)?I=vXX&A3p0m0o#9Ewl7ovz#J5*IV7^ zw>2eW;(pIXqDgWuq5{LcDdz zu>yJ5^JGRX1>hF@NjD)2NMm*WX=Z(5TQ`vtr>^F`q)deGg&v7V zknvz#VWNXa3ZaAWVz;+N!xKaCY$*~43aC)oOj-qSVJ4HWj^}}ihWD` z^vAl#Lbb9z&)r9*>e7pnLUI&nYuY6@)NjJwueh#_G0A@D02`Bp`6yuaE2IaW06Cst zxs99rds8Cm2Yf1<(TJss)0mM7^99x<(Q2d76aRz9S);99tg$9h7o|Bs@LPO<>#L$- zxfeqFl$OOoxT}>j@CUWptT$UdQv+4NNRoL`B*C<;`l>crSi^s zjYUdWz@-JM5vUkET40URY9tmHq%PBJqr*NVzZRrtqFyv%t$#(UKI-n8A z35@_uhMW+&Fyw?_g`yFwh)e^4-D6)h(Pu+F$}UX`;hJf+Gbgk1EoDHqKp zJ--H99gboyig9sCwEi0nP|O^2$sWpc2yH==U>l$mw4vX1^eC{KYV>GFEy^&(3Sie} z-EB>}q@CB%C`2&hx?gi}Wt+3qKnQDdM#4NZ?01zTW8I$Hf;s-B9-ma#dDTy;e%f{4 z&>YYc?ifA)O;68^^gLVie1dFw<4x+uBO}kx<pFB*?4kM(v1O3*fnG?6-?;2vW<+6(ze(# 
zHLtfiKq*wp?X56QB#UjzVH0=3(`Xk4uqI9*HG@?mDUHm1Nk*PG*`>J-&NppEv@yfR zjIT8Z-GjXZmqwZoj%C+UsnY9X`@zoZZN#Ni<6G+z)F0ehe!F?|gJFH;&7p(_ zBx&#s6nd-O>7`L1Ign72Ka%9&D!Wh^WJ*fQwk}&U7GIVy=-=X!zjoe?RCocZd=|LDfRfJr;rz(@)sp_0JZQFQKgx4~z^mP=l zaR^}C`(9!7Kv{^Zhv3Gu1pqZ{0n_-ByK`RgDp<>^dl~QH{x;Z>BLb0l2n#GEMWK^N zmy7S=;e;%zBvgtJ^8!3~u_IHEm%B#Ln0e{kjTj7fy>=DkVpq(#B0hSW9@;saFf z{r+JRyh7Sweee*V6R46bxjSczkR5Dhp(hkj!|9TAD20c9G>Cb4Zb+3yYNtV*fHmzr z7)cGtO%u!Su_hnQX_`ch9hsZKP!)(|$_cof8g0txK3cqwhI@^}w7BwBXhFB|Ktf1c zq>Bd#A+3@Ikq{$(bZi>X{Y)REL1R(p0taG47tz%ML~>WJ^7bVbMC!p33qr5p1r~%J zfu^lRk#%;^CZ19fk28k!G{*Y^m$sRRAyskSwt)CiwO*Z_!-Z3~`b7OS?l07*>yxmo zI)Pi}MqWi54f;dQU&r;EO%bW$i@f;tWqzGO3aA;h2q_|Pm#hep<=GN@c>XJMMcSPh z`;068d&nfg;a;1o^{ufy$!EGinF{a~a!NR3p%fd}A_iQF=_pSOjwgz6nnFs`=_YV& z$1m_I4VI4^+8q3ofdb87pv(&GRgeTlyumlpk5K$N6HSEA8fLw@d_^1(Lpn$rA)B!s z9<}Q{Nkc}sVQNV!NsRuC5(WmyfLHoUJW;{~_%A*lw zl;%exI5@G45+rlr@WV1naByN7B}jp^a}ng2*trODOzd3b1kX9>W7Dsp&@}vH&}F=y znM^Gt_+ffTTuhLArc{%Zl(l@qE683;Q_?TLZ_`rpqWNCNm2!VTHw76~-ug6hhZ_=s; zrB4ff#M@OAW9?bEtjpx3!BD29jCi0O9dPDR?i%EpE!tv902XG#RuWrl##(g%j3pwj0x!ia19*vOi?Nmgmb?i8 z77T{%^{|(1t0BrcKzbgZUt!**@@T={i+MIbht zh~PGcvyCqoh_jM@D!K~#Sn`Kxs(o?V3PZ_;Hh9s!s6EJ{gUJQ-{OZkbj3qx*mDk7$ z;&oS@tISuPt(?VQF=e0P`~f-~%@mTdOwRZQ4Z?~s#$pNxhf($rnm|b8U5znWge<@c z@JJoNpd{5(MMJpR;YfH0&YaHy821cs;B;%B(&{6i$E;j`J?U+Wb7m>B z)>mi$LQ=#p}+ZZ>Gr6W68?9c(W!VGz+5Mw91Bf2bmK+}+Mh(Xt4ZDCXW z#@}&Mqi>Nl3#hotoMnDg<{5}U?#G;08~W%{(HTd9bu0CS6$N&N(Ct3DAd4Bv&hIf# z7ypEzwsj=zCx zn>nd^XD9MES~yvvt_Q!(LN=5OO|i#7N_r~MKgO&S{Hh>_x79WcFbP!>>lNIXL^Dlh z^oUSYD=Y+J+7M95xCcRIJ_^9y&|Ex=VM+|-_trL}_R&I^Co{?ie2AA!;SuRJg{M}V zs0G-#9m1$TW*aUm9zJg_pzq!yJxFk;Ll6`xIZ^l!vQU!TBn5{7#|cdeOUc$KAQWf47Gey}-FxT5&0Ld6t|c zDOY3>GDKg|&$O9}9JE-_%a|f_;X@NDs#6{f`i~t2A!JHORyuxHIftMjahD zxbYHkdGdQSaUVD0o-K$AlH~;2N17)wKp||%?(+@R(pUxA_He)+v`tQ zqYWENsEEBF75VvzR-b#r0}&v7SUfoj-g0io8Ps76@`1KcX_ci>Gi-K(5FJLKe?x61AlQ0p=Rs z1s;>n%9l%me}K`yU09QicdkhG;0I4P4T&1C~m2| 
zuVK_f=+;6&il{;pi>zL0FDx6>4t`d`hEPWpAc&)-49Bf(^ah=fKdGSy@LT>W63Z1{RIWZj3NPaoT+;PpeqDs>Ae%>ECr`npdxh(O zhrl_G;Hc>>5mAv`0*n(wpF!)Yy9{l?X6Z5xW*|401#=kK=I0vv^_kqCz~S|{+d`%> z46Ts}dkQ<|;AE9Mw3ha|OKaAx&qh9BVhZDO8e@>P;faMmgOBhqzyd7l7I)#TGfS{1 zb%)8mfrcV!@T%#`+qF|s1=}cR!J8;XjvU5zH)|O0&qrp6>i#$zrwS3tcXCU( zE6nLm41d`gWWDWH7PkRrcM;DDlVCmTBr|Q%AZ4^#x>NPu%<{l_WS 0: + self.iterator.limit = limit + return self.iterator + + def items(self, limit=0): + """Return iterator for items in each page""" + i = ItemIterator(self.iterator) + i.limit = limit + return i + +class BaseIterator(object): + + def __init__(self, method, args, kargs): + self.method = method + self.args = args + self.kargs = kargs + self.limit = 0 + + def next(self): + raise NotImplementedError + + def prev(self): + raise NotImplementedError + + def __iter__(self): + return self + +class CursorIterator(BaseIterator): + + def __init__(self, method, args, kargs): + BaseIterator.__init__(self, method, args, kargs) + start_cursor = kargs.pop('cursor', None) + self.next_cursor = start_cursor or -1 + self.prev_cursor = start_cursor or 0 + self.count = 0 + + def next(self): + if self.next_cursor == 0 or (self.limit and self.count == self.limit): + raise StopIteration + data, cursors = self.method( + cursor=self.next_cursor, *self.args, **self.kargs + ) + self.prev_cursor, self.next_cursor = cursors + if len(data) == 0: + raise StopIteration + self.count += 1 + return data + + def prev(self): + if self.prev_cursor == 0: + raise TweepError('Can not page back more, at first page') + data, self.next_cursor, self.prev_cursor = self.method( + cursor=self.prev_cursor, *self.args, **self.kargs + ) + self.count -= 1 + return data + +class IdIterator(BaseIterator): + + def __init__(self, method, args, kargs): + BaseIterator.__init__(self, method, args, kargs) + self.max_id = kargs.get('max_id') + self.since_id = kargs.get('since_id') + self.count = 0 + + def next(self): + """Fetch a set of items with IDs less than current set.""" + if 
self.limit and self.limit == self.count: + raise StopIteration + + # max_id is inclusive so decrement by one + # to avoid requesting duplicate items. + max_id = self.since_id - 1 if self.max_id else None + data = self.method(max_id = max_id, *self.args, **self.kargs) + if len(data) == 0: + raise StopIteration + self.max_id = data.max_id + self.since_id = data.since_id + self.count += 1 + return data + + def prev(self): + """Fetch a set of items with IDs greater than current set.""" + if self.limit and self.limit == self.count: + raise StopIteration + + since_id = self.max_id + data = self.method(since_id = since_id, *self.args, **self.kargs) + if len(data) == 0: + raise StopIteration + self.max_id = data.max_id + self.since_id = data.since_id + self.count += 1 + return data + +class PageIterator(BaseIterator): + + def __init__(self, method, args, kargs): + BaseIterator.__init__(self, method, args, kargs) + self.current_page = 0 + + def next(self): + self.current_page += 1 + items = self.method(page=self.current_page, *self.args, **self.kargs) + if len(items) == 0 or (self.limit > 0 and self.current_page > self.limit): + raise StopIteration + return items + + def prev(self): + if (self.current_page == 1): + raise TweepError('Can not page back more, at first page') + self.current_page -= 1 + return self.method(page=self.current_page, *self.args, **self.kargs) + +class ItemIterator(BaseIterator): + + def __init__(self, page_iterator): + self.page_iterator = page_iterator + self.limit = 0 + self.current_page = None + self.page_index = -1 + self.count = 0 + + def next(self): + if self.limit > 0 and self.count == self.limit: + raise StopIteration + if self.current_page is None or self.page_index == len(self.current_page) - 1: + # Reached end of current page, get the next page... 
+ self.current_page = self.page_iterator.next() + self.page_index = -1 + self.page_index += 1 + self.count += 1 + return self.current_page[self.page_index] + + def prev(self): + if self.current_page is None: + raise TweepError('Can not go back more, at first page') + if self.page_index == 0: + # At the beginning of the current page, move to next... + self.current_page = self.page_iterator.prev() + self.page_index = len(self.current_page) + if self.page_index == 0: + raise TweepError('No more items') + self.page_index -= 1 + self.count -= 1 + return self.current_page[self.page_index] + diff --git a/tweepy/cursor.pyc b/tweepy/cursor.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9fe446fb92775d26cdd7a5cca11ce2346e1b4b85 GIT binary patch literal 7164 zcmcgx?{6GO8J^ucpYIZW!wY^>2XWy^x zOxmPM`GAA?fIvd<0r7z%!Jor_!yizd=b7ETvtzf^cDQ6FGqbZh^S;mfKJPn|#(yqd z2>$(#pZ87qYvA8^QS4t)W%wJIp)sStYy~FBLX(GPJ1|+pJTvCNm|tu)OxCPM7wXZb z$yz3FYVKk^x?r+)HQK31TPC|ujb5xr7frTQjb5rp+a`O-Y<8Eixxr`nbpy2A1ym0| z%JbF8&W|~>>cf<+6yM$>@M)d zucG>Cx-%H3ZZH`~yZLaM7g2whTHEK>n`QY4C;t^D() zBiV17;+jA-W)H>yyzofv%`WHL*HFc|dsK|0N*56{N`>ZD-`BjD%93=$!D!(8*hRj3 z8P(Z@rOvb*ox#%0Ot=IoC0K(!6bW`64LSe>fCB{Plgiu^G(uP&{noyL>Nt@g-^fOT zag?Y1T?yFb*B!A2&Z8P@%If9f^SBn%@$-YT3#*NM9p5U2T@E@ymvWc5kJC|}B;uch ze)DL^E=@W~vPnNlV)|D6GOJfn&32^LDGV;AWL&^f_EyjeJC~MP(#(U@)?U!ZL;X1Y z0gAncswTc?&v73|Kz!o$EwLV5sLzU}2A2CKaX~^f0H$meB*up5lnu}+>4k6gylL_y zG4YEpv7(cE;PgPi9ez4f)oHXuwckdOmJCqipFWjQ6`3hL7G?@|)PDJ_=R?XXY);(9 zXgbVC`Pk)|Vv8Qi&d#(y(a<=5?B2vOmTEFzW_K~U;5@f7E%FaZekX2)C;?dSw>>XA zwRH}vXyY?rP|$mlyfxc{+${bI*H~dCNBSCD*H~RgHG@E{)7S}P+3P6Yo5D`3)95r} z`q%Rw#Cc!nK}Z)Ge47d*U9zmB*iBS0HP9jSI1Bwv6To@)v?7jsP|MBdvdKup=BHpi#a$y8R?n~ZZuf1~*&Wr$~=Hc<7s z>;B}?*eUn%yn5I@#W>cbMfHst7^lI3=)R3&>5(Q_39cxF%450vT8)202WmiwL}rCl z%HCBsFvWc}q%$a)S=N06$(i~1SCrTJcUdE4LftZdG{G;SbswWbgEq#KMb#pDfQ1O= zmn(jE=$lOf*D&vd3~ND!hA{Au&_}u*RPi@hF$BjqQ8}{UCNc{j%8*eWCC7-fDEcrT z%cO*YAk0#iD#&>B&X&Q%^r`Qml$pz2QRR*B3Lxk(wo?Q$R1HCjj#p!;lN$!{7S;+a 
z&9p7W_-U+VvwKZxyFqPJ$%24!1VIK!Am{#en6$hmxtxz3_BNX9>DZ@{$abRl)BZm4 z%_6@Sr7qeY6c+g<9?a=n9uVP0<%36+D=>(|TzbS+&Su0UrVSKJSMfPuC%6+X2i}WL z;d?4oP(y+1y2wZJs5NYkxFwyV1sbHX7#EMs640268Eo_;B9pfc7K33z%M)G(BUp zm{v6h%MEr9$BXzD#%4U`C*S56imy(C63J@|#lFhVgx7*j^LlVY)U_1oXu_QzLhMf% zpqBqP#5_|$vrwkbd*=fO;Q?A69)GmoJ{g|2;887j6gWzF=w|jERG$ye4vwM7l%7XO z{vdFKm5(`fxS0d!7+Cb~95!;TvtvU#gqG*92|I_ddEvB33MBtmwAkugj%m4l6#dSL z7HR&+(;}?~E{R}x~m}K zp9|q6fwS|;ukn>K4_yv!2KVPrj@;ckpZ^$@R|gr$`@Mi)a1TX^2raL$AmQPXrU9>KSnAp>Ub)x0%rMFatM1rKt zB0piA<&S*|!_D{sn{OZS2oV2ZtAPazp7!Xs+@dtD!}M1m-{sP=QMSp*MdNYa)h1ryv)d02B<>63l@ zs%%3F{yQd>3dUV?TO0uj&~b7;D}*YjxI&YgJeih?gSB`C<6^)E66ujRRIebxgmUo| zAN-)G^@|xgOlf`-8Kv*~%+l|wLH&|IdO8$T2dtgRiN`C^vZaCdCOUrkpp#LP??iAj3Q+45sj-ez-}QIW-zUsEm0k{4uCKhj La%1I z)qvcEq>CPUrn;&Kf4Uv@{q>U3_7n2=J(oLY$cYTmNKvlnlW0x!I^;t@g9!J0hvE6n zn5tLTw*KP?_~!>Mcges6c*7V_9txA9VFN5oIc!ogh#I(j#vo+N3-g|t$``iGux3|} zxOK(giB^i%idICLF_0!8AJU|O9@BsnFC9dRAM~Rpo*gkP$|AG5@h$FalR8`aV`*Jw zOJ{tW553)b8EIXVh1YsxS)`iTD1pfY(ju-6xNGj>v7MVZt*2?>KjJ6jrruVuAM>)@ z#U5!bV}rbIs$~LQ*zcf+)TwH#42pLpo}rsvP4;#$76oE2!Q(?@NHSVGUu)fEE(dd@ z`Vok?A(ZLdXf1!a)^nRLW;jBc*165NM>Ks~8Znh%vezWtKk{J`LPnBLOSOWNWnN+`o?!zJJb* zCL$U|Kq@yqTnQHZ47McKikAFn8<$+h7jqSfugDzW zjcS4P=C7kZERZ*RTX(PDYetbDx-Y-|ntP|~hg*vcf}1>7*y%G!BK$h)KG?FL zZWqX$&V6~W71jL$Kh#DD!IEIGvM|avOiXvf5A_IN$C1=(%}z6_)uJ)JwCkI-rY9(> znP#Wq@^tcUCMX7v8WtG-!}f{lw22vGOT z#fY@ry4VgpzhxFTwlq6Ez_!dNXU>^$8l-$7egy0<;RSHrRw|s0GfRAT2>k;9@2K#C ziU2%H9JQ2#p0n44Fe)NxdPe>S7!kizf{wqyp|tP@IYuDq8yxa0nb8nKsxaYw}Ss^a$<%(mzm22%oO(Lqe3Q~{)1_B=k4v8>EH;1VgS=g&C*l9B`(c5hqpk#Iq2Bi~x{75LvRIm}gP;HZ7l9cVAz6tCAR% zW+w_zBwE#Nb}$eW_+rJH&^a`!1kr2ljh5f`JCX0@Y$?&wXc;}v(MaCgCg=o4Dn_r^kx_Wa#Js455rP6B9F|A}LE??yhOzoBU9WNSK_K z^5Idb#9_)QD(h6i?e@f-f@>A~Qe*U`$S}e^1e%)p3VZ!2#00znAqPv4fj)M?K(~dT zuHO8>`A&hH^k9_C1XHvQQ}lh-4zG#t1e=7~G6!%~r%2X+P*V3x>Nc8y?MBIM55^Qg z1EXPv_s7)3qH^MQ)UfY&=4FJUjPO@+b&LtNU>}$s8pV}Hq*B>|Pl<2qB^CZW*I+!? 
zAReWp;Ev_G`6M15{j)Ti4loADvysMRq?fbm;E6n7xvJw*^&jF5P3>-$ zn?yFKop3fUKzt(x0uVrMF*gxdGIvGvOxM7WillYHrJ>m127E-Qry86AelbBeZ20hX zmHN7Tw*|Fs*b;|7^yy8q0X1vah_T#g)f>Jhk1o)ZNg168Ba#X&Oix3)7aDF zD)lDwz`DJ&|g-CDQ6ze=V|A#GYh}kzD+r0XTD(HPoZqeIaQb| z96=2+x||;26CtC9p#sKlqKU!Li9;Y5plOx$nCF`)47OrQSWix{MQu3{R6>Bb`|qN& zypPb=bg4vQ)*0qJH5YvKVN8&kE}ozn>I^TTV+ifwD4anjR#+i4cY_cDHq42NN0xNF znoW48;Uiy(km{G1BLVa!CYO<9OeeX1L?8h+3|4Qa4TF;nQ`*f{f|~|&l$gw% zWwAE#r|bjjaeZ8cpXQob`*r6Zaz&-YgfY4yGl;Dmt%+EV5x`T?7m(@~kVvp%C1|yR zd(F=30>Rc?DCn;;5$!_?S~bqL*F!{w=uo^;Zy8_0G?`x~s#=t={zbOp@+#w@e%R*> zdT8S@)RKuhrb>q5g>s$~#oI`zmU_nTqB1;VPzs2E8xkI89!^eYk{PhlJYGPZhwz<8 z)aH3&>*~!@vAYAehDtk66nf3i1BWBYl1Pw1(_?IOh-SHoc#d5-hykSYDeka7i;sfC zod|o16QzEqeHCw5pJUypkqm~p^XTE%@EE$bTqaEvCJ{@ScAm^)DJ19{tV27Snkit1 zd08_&QcevsPo6Q+^BG z|IUsnQx#CmuCSc!yqjGfB5r{78$Kk#Q%#JD`Eq(`JEagepY(Ohob2y-$Rg< z>*((%c=EC`1c>N_TZl||89+2QqGr%B`W=*Kfaq`IbqGM*KsSGcXUISh*A|F+tF=E< z7n3Gq{P7UxzeCA#jhN~zY>dQuOZbkk4GaBM^!2CN^cN(ulLdOHPxKikG?4mDCREXy z5~IJxWQhq`QInLKzJzA4nJSW8KgNJw?>Zh+Mp7!w9G;o7{_w;I{)uNS7WHZ~#75Vf zzA^Pxzk#~)Lv?x?k8zRsz1(E$Bf~+4Kg-&wXG9Zs1&{eL66gdd0eI4k`uqm9LE_JD z_V8!TixQu)ZtHh)&ZdNa_I6vpm#b={Eoms{v%OQh)`vAlbc37$R5UkWog;BuZ74DG zQHfGZP@1Tn{?%!e$zF-q9;QqBm+01TtHrvc$LWqDCU7LPEX2MboUE5BVP+%*yE_6v zj!2A%C*+6z75WczStzT#zrj$kcCy&)B_3R~4T0m|LpOiU@o0O&@lz7Ac_lUcyP5kK zD)wY<@H9y*JUxyK?VO;P?p5a1XWR|Peld>a^k`jOoON_n|g;7ixh zMA~KitCvxh(WW0obOt^AJ)W3}CyP^97aXx^L1v!|j&mpxv20Aq^)v7-^!0Zfm;zEN zh+lsN5zAFX4g2H;uGdpuco8od5ux9n&FeQyeTi!Cse{O4m7SE~7}{mRrIHOvjw9EI zr=Aff!%;uzAzl6jPrPHyfa4KoKj}f7|1$@S>2U!0rTSgpTk`8+V{JD{TvX^Oo_pFJ z;!fmr8JTUgc`HwwmwVWr@H1j5dlHb4*bs+x?o#=Zc*dR>ru^IMQ=3$#kpt;M; zLxdOE7ex#aE>N5=%0`pyc*P~yEEW4uLmb4`mp~pY2G0oD64(t}2WG^UU`xxEV1vq* zj3*@}wfRMv$Ap^Cg_^KIHm+`CiwEs6f=tS}^Kl$%n#9~I5o5)c-tY;cI*x4Bv*Q!8 zG{ey>?4Yj6z7F@r8v(bfxvztrTPjC64JElo(CtKGnmYbwR7*Bqsm<&jkZ%?^tX@MR z`?0W88un9Zx~{s#gfThYVRDwqGfd7P=_y_8I-U19R(*k0@z|taMfsoTz)&(&VN&*J z4r8AN{|bj-%=S}h5;On7!6_&(V&vazSTd*DvWSAqtnk}NdTL7sv~fTYF>Ue08OpXX 
zZEQr^J>1-RxHzVbP_~U}Ba}TR5p0CAZA=@XY#Y-?pEjnAbB+h34vsd;Ny+LgR|N+# zq1~&3M<}IWq^pAWMKj}7AuQVJrJ3KB;I_n=!6e&arP(fO8tWDzVnAFrZXqM$aLs3Z z?2JJBSIp1GG(bo4DHHNR0w=1m0l_by# zBqGR6dxdy31RU|`bE!tc-no(hG!JilP5}BsJ^(EXQ>&vnu>NHxPcS*lq?a^4g%9F? zTcL*yiG~?&9)s%?+q?;92BGF{!Df=;4Lq{;g&P2klJ1Fu@gDWs7Ns}R-QW>bds2je znpSr;J3KXbz()LWJIE{npiHX)7IIX-iK%8VMc+likOG>re4E=bOHP=G5#*jiW(RWf zyBUwTII$isxipL6bEdT`cj2BmT)dCWBj*_6VNT`XNO*`+AUyOrQUK$VFjFQ4w2V!&jhCK9k%U@@&s7X; zVKfWOnH(&n37=FRPT*xPiOxyIc0l4l_p;@#og`uZL)cv*4Cz4Uh6)44w%bWNw>zUd z$T1`+#KvSpQ9I^sz`*iBP((gbXs~Cu0~8K2Bn4}CYV1A-cW=I!gJ&pmLd^xvO`);s zcW;OW*TB?a+eTc>V5%%|Ke1P^N>iY1l>Z)ca=wMuNB=652TZs@uD{3RA(LNc@_i=1 zi6k={lIv%cLWlLg@W@siqn|}*W;~AgOSC`9OB_tcp~41Q$Pejt8IO7FMQQnT2S*?W$rNEv2C|7l4_kl6iYT~7(CJ`@ zjN7;ZCe(x7T*N6R*=`rENlOt2S>BR{@fIZCA0x)Ae~1x>$rs-sd8n9@vrIfny1!iU zF$Q}n#}%~Ny!h~D?<+BnrD}@k16^8Yiz17aZQ@bkE#6gkk;^14Q>XoC(lhAmS^|aI z*DZ?Q!%(j$Lm|3D74_QHLD5B&UBP4Mn=;mto=uw*?8OmEd+&~lzhdu2F>BZMuOeq7 z+Ahmd6*f3xmk2F|i|52f_7%B5!XV$uA(!6QWke)B2=3)zbGM439``?UuRA>qitrY( zoQAjP5(~ZKC-q-do6m^?i$h)c4WcAwm6)t^>P83sWNQZwX1$Em?DUF% z4qb`dB>pBbIF$%@7mj7&3eHJjK8g(4Ork7^GtTot;4Que^)q~)bmJfGN8PaFwt~)T zGwOQ&1($o4%<-BJt_(zL_i#AfU04pHC}`s|ZUR5!HiEE&%eKKv+R<`MNq0Heyx`W2 zTX&-_0?~_;X_f11O%xl`#qPGf$(wKF@?+~>eTz+b2BO|sZDBt5)|!pAv=81a!WAwT zcaoNUbmF^>IxloZ?gmUbP6{lhRhxj*UWgQrLS_?NkRSxpA+%(M6r7Pj5)N{&;uK`U z;cC2r?DhrSZpy&t63UQx=HHQ|)BkMBba3fo&99R(xFCGN#n@~ApXk!0aG5i~$G_I>g1Oez)zp$#KyxHTUUJ^n)xE%?$bP<2d?P;@tp2s2Ca1 zKyrJ_9!k&rhQ~ODu3sa<&M_fd4u-1p=;4P%mGyOb`ut#g`h4$gWj4x&m0Uk$B3r!A zvFVsX{N`kZApYAc_Ig?>h-3;8#ARfxAVMOtktDD0{Ehg9{j^5<0bi1LrBYFYWmRQ0&>=T-Fy<$pm{PbmLMRXxewH;!jM zKq3nM9piiM@|J6F(czC+1feS@qu~psSCEZVO>WzoMfcsH>$dA#E&?!eMHTiBMl-&6 zCzW|s*Yn&h-2HQddmR_sygW_k*$cbI8pj3tp}DvTt&&|dzrdXAtx1&XJ-picY1>bw z-C{Ytklc}Hpil|6x1CW~V<{OS-ouW!oHfayfp4W5C(*Am;Tl$gA<4NZ&Gm~t1x3f) zUNNB;Dekf8RV(VE9DafiLtvp#(ak4hp_9mzMoW -1: + continue + # Remove whitespace. + param = param.strip() + # Split key-value. 
+ param_parts = param.split('=', 1) + # Remove quotes and unescape the value. + params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"')) + return params + _split_header = staticmethod(_split_header) + + def _split_url_string(param_str): + """Turn URL string into parameters.""" + parameters = cgi.parse_qs(param_str, keep_blank_values=False) + for k, v in parameters.iteritems(): + parameters[k] = urllib.unquote(v[0]) + return parameters + _split_url_string = staticmethod(_split_url_string) + +class OAuthServer(object): + """A worker to check the validity of a request against a data store.""" + timestamp_threshold = 300 # In seconds, five minutes. + version = VERSION + signature_methods = None + data_store = None + + def __init__(self, data_store=None, signature_methods=None): + self.data_store = data_store + self.signature_methods = signature_methods or {} + + def set_data_store(self, data_store): + self.data_store = data_store + + def get_data_store(self): + return self.data_store + + def add_signature_method(self, signature_method): + self.signature_methods[signature_method.get_name()] = signature_method + return self.signature_methods + + def fetch_request_token(self, oauth_request): + """Processes a request_token request and returns the + request token on success. + """ + try: + # Get the request token for authorization. + token = self._get_token(oauth_request, 'request') + except OAuthError: + # No token required for the initial token request. + version = self._get_version(oauth_request) + consumer = self._get_consumer(oauth_request) + try: + callback = self.get_callback(oauth_request) + except OAuthError: + callback = None # 1.0, no callback specified. + self._check_signature(oauth_request, consumer, None) + # Fetch a new token. + token = self.data_store.fetch_request_token(consumer, callback) + return token + + def fetch_access_token(self, oauth_request): + """Processes an access_token request and returns the + access token on success. 
+ """ + version = self._get_version(oauth_request) + consumer = self._get_consumer(oauth_request) + try: + verifier = self._get_verifier(oauth_request) + except OAuthError: + verifier = None + # Get the request token. + token = self._get_token(oauth_request, 'request') + self._check_signature(oauth_request, consumer, token) + new_token = self.data_store.fetch_access_token(consumer, token, verifier) + return new_token + + def verify_request(self, oauth_request): + """Verifies an api call and checks all the parameters.""" + # -> consumer and token + version = self._get_version(oauth_request) + consumer = self._get_consumer(oauth_request) + # Get the access token. + token = self._get_token(oauth_request, 'access') + self._check_signature(oauth_request, consumer, token) + parameters = oauth_request.get_nonoauth_parameters() + return consumer, token, parameters + + def authorize_token(self, token, user): + """Authorize a request token.""" + return self.data_store.authorize_request_token(token, user) + + def get_callback(self, oauth_request): + """Get the callback URL.""" + return oauth_request.get_parameter('oauth_callback') + + def build_authenticate_header(self, realm=''): + """Optional support for the authenticate header.""" + return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} + + def _get_version(self, oauth_request): + """Verify the correct version request for this server.""" + try: + version = oauth_request.get_parameter('oauth_version') + except: + version = VERSION + if version and version != self.version: + raise OAuthError('OAuth version %s not supported.' % str(version)) + return version + + def _get_signature_method(self, oauth_request): + """Figure out the signature with some defaults.""" + try: + signature_method = oauth_request.get_parameter( + 'oauth_signature_method') + except: + signature_method = SIGNATURE_METHOD + try: + # Get the signature method object. 
+ signature_method = self.signature_methods[signature_method] + except: + signature_method_names = ', '.join(self.signature_methods.keys()) + raise OAuthError('Signature method %s not supported try one of the ' + 'following: %s' % (signature_method, signature_method_names)) + + return signature_method + + def _get_consumer(self, oauth_request): + consumer_key = oauth_request.get_parameter('oauth_consumer_key') + consumer = self.data_store.lookup_consumer(consumer_key) + if not consumer: + raise OAuthError('Invalid consumer.') + return consumer + + def _get_token(self, oauth_request, token_type='access'): + """Try to find the token for the provided request token key.""" + token_field = oauth_request.get_parameter('oauth_token') + token = self.data_store.lookup_token(token_type, token_field) + if not token: + raise OAuthError('Invalid %s token: %s' % (token_type, token_field)) + return token + + def _get_verifier(self, oauth_request): + return oauth_request.get_parameter('oauth_verifier') + + def _check_signature(self, oauth_request, consumer, token): + timestamp, nonce = oauth_request._get_timestamp_nonce() + self._check_timestamp(timestamp) + self._check_nonce(consumer, token, nonce) + signature_method = self._get_signature_method(oauth_request) + try: + signature = oauth_request.get_parameter('oauth_signature') + except: + raise OAuthError('Missing signature.') + # Validate the signature. + valid_sig = signature_method.check_signature(oauth_request, consumer, + token, signature) + if not valid_sig: + key, base = signature_method.build_signature_base_string( + oauth_request, consumer, token) + raise OAuthError('Invalid signature. 
Expected signature base ' + 'string: %s' % base) + built = signature_method.build_signature(oauth_request, consumer, token) + + def _check_timestamp(self, timestamp): + """Verify that timestamp is recentish.""" + timestamp = int(timestamp) + now = int(time.time()) + lapsed = abs(now - timestamp) + if lapsed > self.timestamp_threshold: + raise OAuthError('Expired timestamp: given %d and now %s has a ' + 'greater difference than threshold %d' % + (timestamp, now, self.timestamp_threshold)) + + def _check_nonce(self, consumer, token, nonce): + """Verify that the nonce is uniqueish.""" + nonce = self.data_store.lookup_nonce(consumer, token, nonce) + if nonce: + raise OAuthError('Nonce already used: %s' % str(nonce)) + + +class OAuthClient(object): + """OAuthClient is a worker to attempt to execute a request.""" + consumer = None + token = None + + def __init__(self, oauth_consumer, oauth_token): + self.consumer = oauth_consumer + self.token = oauth_token + + def get_consumer(self): + return self.consumer + + def get_token(self): + return self.token + + def fetch_request_token(self, oauth_request): + """-> OAuthToken.""" + raise NotImplementedError + + def fetch_access_token(self, oauth_request): + """-> OAuthToken.""" + raise NotImplementedError + + def access_resource(self, oauth_request): + """-> Some protected resource.""" + raise NotImplementedError + + +class OAuthDataStore(object): + """A database abstraction used to lookup consumers and tokens.""" + + def lookup_consumer(self, key): + """-> OAuthConsumer.""" + raise NotImplementedError + + def lookup_token(self, oauth_consumer, token_type, token_token): + """-> OAuthToken.""" + raise NotImplementedError + + def lookup_nonce(self, oauth_consumer, oauth_token, nonce): + """-> OAuthToken.""" + raise NotImplementedError + + def fetch_request_token(self, oauth_consumer, oauth_callback): + """-> OAuthToken.""" + raise NotImplementedError + + def fetch_access_token(self, oauth_consumer, oauth_token, oauth_verifier): + 
"""-> OAuthToken.""" + raise NotImplementedError + + def authorize_request_token(self, oauth_token, user): + """-> OAuthToken.""" + raise NotImplementedError + + +class OAuthSignatureMethod(object): + """A strategy class that implements a signature method.""" + def get_name(self): + """-> str.""" + raise NotImplementedError + + def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token): + """-> str key, str raw.""" + raise NotImplementedError + + def build_signature(self, oauth_request, oauth_consumer, oauth_token): + """-> str.""" + raise NotImplementedError + + def check_signature(self, oauth_request, consumer, token, signature): + built = self.build_signature(oauth_request, consumer, token) + return built == signature + + +class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod): + + def get_name(self): + return 'HMAC-SHA1' + + def build_signature_base_string(self, oauth_request, consumer, token): + sig = ( + escape(oauth_request.get_normalized_http_method()), + escape(oauth_request.get_normalized_http_url()), + escape(oauth_request.get_normalized_parameters()), + ) + + key = '%s&' % escape(consumer.secret) + if token: + key += escape(token.secret) + raw = '&'.join(sig) + return key, raw + + def build_signature(self, oauth_request, consumer, token): + """Builds the base signature string.""" + key, raw = self.build_signature_base_string(oauth_request, consumer, + token) + + # HMAC object. + try: + import hashlib # 2.5 + hashed = hmac.new(key, raw, hashlib.sha1) + except: + import sha # Deprecated + hashed = hmac.new(key, raw, sha) + + # Calculate the digest base 64. 
+ return binascii.b2a_base64(hashed.digest())[:-1] + + +class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod): + + def get_name(self): + return 'PLAINTEXT' + + def build_signature_base_string(self, oauth_request, consumer, token): + """Concatenates the consumer key and secret.""" + sig = '%s&' % escape(consumer.secret) + if token: + sig = sig + escape(token.secret) + return sig, sig + + def build_signature(self, oauth_request, consumer, token): + key, raw = self.build_signature_base_string(oauth_request, consumer, + token) + return key \ No newline at end of file diff --git a/tweepy/oauth.pyc b/tweepy/oauth.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8d718e9f1f61841b757f93393c4364c2b1d065b8 GIT binary patch literal 25801 zcmd6QTW}oNdES{B5Woz9ultBp`?n?x>^la&Wj#dhP_G|ApUGk@df4__?`q_eW zE$42cEVx$Dtrgr&RIFZfH;ZY#)z? zW!FN_ifawJV6R)k_(SfobB~;R|K^Bmp;OsC#@wT>RpEo1FSyoTRra~oh${PC3r}BA zw*#v0Q~jW8?RUW;4SFCO^ss9kOj{3Stw&tzaN2q#YaMf~v9$GQ)_T;nUQAn$Wvwr| z)=O#Y@vQZjYn@12KbEz=0rJWD?Du)Q9*1 zZZLm>qKGZ`S32w4R}w~*rOj>|QvxaI(dqzYn(u`v(s*cTyJ!~ zA`&+iJNE*u#c12^!~j+376Al=Q8jrKtu)rwyu~0v80HPz6*i^I!Rj^_qZot_8*AQ1 zr>psPF0+dHFVA@Ma~BtGO!_n4?7Vl)pL=(9dS==?IXRE}lM~*J*@eq<*B3nW@F!~v z?|X9>y~*19-dAU9(-Ypzd)NG#`FU^7ugqS(c4c-3ZL_thE7zxIYnQwWc&|1G2A%~Q zW6*^;j}sb zcHO@=H$Q{vr!j18wsz6SEHhVUY714&iZ*ZNUEFx{mnW}W;Z&8$>ww-TeBRXDwfFtm zOP3eC%X3$zXV7?I29Ql&xH4l?VNp|8CTFisc+-S7y51PPd6W{VAfnfGc_lMF8y!{fshI{#48*=g*IG(C9@Jmx6Wx!S#X% z&0s?+pt%O^P>l}bp(gMG&fs;L&CPDmsx~=AdQEYdQ4}8*+=tH9wFS+OV=HnsmGKa& z>p>JXmVciq6IN zgJ5IpT&ICmS2woeVZ2eVL)pdk`We(Cj_(TNoFRGGZmb9Ox{y_`L&0pW@s=>v>rj04 zy1xhY917K*9TcB4{}8T-a}O2<3o-t6P2zzUmnu~4LnYL%3~Y^IpLjDN#RMMA3I7oE z8#iv8C266R!zPq8wtz+}=z8OCy!N_L<4m-lFR&9-Z)0u!os*}dlWN@gvMr8K#9XU- z=L>%JwZ_LV%Hn3Y)~b^bDU5Z4k?Sxz6tmS6CtCd>RDhT};uW(@9u*1+20n&|Gf}g# z0hQ@p_pkKqBHZ52RgH5ROF_)V{FrgmAgY_)wY6|jtGm0|i34Lic8dC1D8~*2(Q6z_ z+?B5?qvz-i!jzsQJ!OcT}anG9U0=^d!hf|$4cKg`dY=_NGE8q^Z z884mvTs0z{RIM2=Q`W;MY;z}Q24dgjrC2bw`M&f7c1o?@Cwug@cxszEfcRV;W4?i# 
zh*P=ZzQSm+$r}Fc)$S=0OCHf)e4a4WjwwV#Mq{}VZsc7aS(XHMyI*rjbC4w zI#V@55-njn7Lns{J;;GqZ5Moc5F;*AyFr5L91`eT-}nN-HByGdsEbQ^Pod=3>b z=N-JQ6&dlSZ@=sw7F=9(K=d6L&7g0IzG<<#a=`9nk4ZXzAtq{~z$=b7%99vmko>6D){vVZ1*9}w z3WKh=!&oQgo+tGSDUM)2=Q}X<&IMf2DHK8P4h$zud}D6bV>}wnw&LGG=X5CAcc$8V z6f6u)*m)lQY@3!2Lc?n{;)WM*L3PF}4I1Mcu*ad9Vd}zW4O>*w@m6X#rt@gDdtt*1 z(Gj*Nc6W+t7t%_*-OS^ldyiJ^wQlDg#_1+7G)sV&7tt|=D;h(Q837*_Wev#oNUZ_( zl#W^rw#3sml`IjTTZDg##m7W{r*Ovir~+SmP?Z;4u+IfAP*c$>m*0MKKY5T^DEd_tVlKh> z|KhAh+na$Wc|C$*LM95pGN_`t(QLxt6L-OG>;`u?p?a~sVc}J^ssI~!XU~es^hk{f z`}MXbENQ&IOTNpA`~zV(5H4r8C+*{Hq5;xjL#ZdCLHgHh&+Bd`1~U1k2`_N5(OiwW ziKRDn?4V0wcRgsCs*})+qXVy9tGW@q8wGJaorT*{COc0qhI2vATh6VPUH8RQ<_oSp z!p-rK454&+)P*%e`Ds*Tl&9L%e-$k~3;7gparM2Bbb@t^WkyS3q%dNJ+c4U)jsE9w z16p9C?|ws-0xMVqY&UC%pkUAEatqG8HRv7|-NTZ5IN%=caSw4jhTPJi3&ZW?Q%8#DY|=ObxWfk!j@SO0XWN+79BiPE(w@ z(SUV9h|saI9wl~m&@~5asoPj)_^2<^(OO;Z;lVn9jnEd6{axW;@nGQu^ypY|v~aL6 zTF7j+5}r=6-$fK*N;MGcO``?q@+Jvr=D4x!=aXAfwdCGzYcIG1a=>tfjcyc(_hLZ9 zv6PeKu;;#RiYlfDZOy6p^a!bToKxtbP0c^U8pXh;p(S@7%fw~?<2{!Hp`819UkIf> zk2^Ho5vuwL+(Z;}S11)Ku;2F;_8SiOppBPsNO|e?&M3`=Lc;$=cMRA%1_2=629moH zcXZ=vSi+Etts{EA$DV`F>jUma`+yq&*Q{1tcf}P7g?7Qchez|{V)NUW*7JiHu{WAB zLN{WZFqR_(0}wewtZgk^4bE$WpaJwG9o~8H;6WlNdY|`M_n7jsiqJuQajnr_t>0^` zVMFrYLMwHh^J(z=pGP5pWTP~fLrIY2rGD3#{$gV-3j9y7H)*L3Uk$btfRPC=ugm`o zJ7i>+xCp4e2ugk=QWBDW6SpQD14jxk;JKruKZtBsZ$Y`5}2d0=F)JX$W7M@NbRKvt4( zhD{SM=npX)oF=l+9G`_k$s{y3YXgrsH&iD-1pHPo-$$@f_UdLGds9aMX+-##QzG~`d%ANx@>KKgc0Qv(+c5s zB`c1o&Q?g>3!VZ&upX=>sVl|?v)Tg1wU|Nl#0E%3s>fcFEZL@Z6&;_`_I4OU)w!N5 zkXYA5-$8rgEgiHKk(+sm%dVgjIyw;#zLYG-0;JQotL_#|COH(~!z^5kEnpQ6$(Z~E zprGj9zB}gd_XY^u6Dns+l`;<~&PrGIhx&D>d&C-`G;lI@h`x(QQ3Zu74i-)nj}#6R6c!cZQ-m`=4Mu6o;Iu!@ zV+-Ab`5hSjk}xX&3@QqU^stStA|QW?Xl%jhg1xHvEv!$l7yi;&dkcf!J;lE_;0z!; z!_Iq{nUQ^3#q7|Rw!2W5Hqvy=5~3T}eJ(F7TuTx$igZ+;nwybN=YI`@MP(EdkUr&X zqy+eSryG8t9K6wjWbhfs@?}ABUo;z&B=dR+YdK>yTItV5y;3@ojNF^_%L8hu$_7h5=0KZUhWF(6iKMkQ0 z3nBFR;Eq;0a7Pdc;h*Eylml?rq)cs7cMN1xaf_iJrHDg?TsbdgtQ~mU@+~)JOGj2X 
zTY0kPFC|Jn&LywAZiZ1k-Gu!g#>CG_FH%IXVr=dI3ANr_0Wr^~Vvt`!#nuT`xV4uY zfR(>lydI-ihgBxZoW0yqsGJ%=1{)3;CL zgFzvH+n@)CV!rsmSdBaoO`TB*JRO!I57k&SJ=b9NaD5Z}NDCCEA*|1y z-6=B`{hJ(~2r;H4T4d(Pj*uocDQBcmIyu5o53y~J%*(IL_*9k@EZ4-H?MvjDjWjKt z@k+kR|AL3+YJ!PN#l6_M2a2V_m!S|cmL^+^&ZDRpoo$;lZ!==H=K2N>z%&~Xj_J?@ zB{Slv4_!FR0LlUsLN?j+Iq@fDY{$}iY}5aar#pF;te~}g1C>~^$rVXuCBcvaf`Qm( z(qJI>3+&q1Xli6F@YpNZYX& z|J-XrI$NXgFWJma=ihkaGuo(5M;PXGl-i0kLS?4+Fy=Y7F-7jb&f?Q3`e8@^76$kW zToL1B4za&O#S;i^@XB12OtUandm=)?!aS^$%1{+I~Zaf z<`mg7liA#yB2M`TDYebDm`OA{{g8F7#*|yLZ?eY3`|*DSkCG5LjX`-sVo17Yv}AV- zmRn*mqoO;<)BCCC>NDKX52`Ji0>xENT=5ju)V%PVAHS!+JkxD(4as?Nc zsO9}HR4BxQZgtpokH~t5@S+i7`*rQtkaa};zbUOYB12yc3S=@80McvAa|O*&4L;f$yM5I11BXrH(lxhaqi=92a{Xw5gl z4v(3BeyQ=nQP^$e3jB-Lc0>`$@pB^H-i z(D9JkMi8z6iR$lA8tr=sGCO}=$%CqDY3a~2;7Rs>ED*A7RzpAa>6(xC0!5 zJDlO7=Hr!d2@m@K`w5o}NCTPqqfZ1#-tIXm4V_|9<_sDH^b^&nM-*qM3HkmOhO>Z@ za7&Du&A?5|5Vajg;&Bzb@LDsp2ou6dO7pK0EtGQx8p>EYP+_KPih?OTi@uHm`wmUK zR4}Npi|)ZH7dd^q$=om23R4iGf@ctuRjBB9+}#sG79L4_l5UVAAPQ<CGtwZkE+Ze~9JY@TbY_x!^n72(=o=>9>I0GgXn}m7I^_@^;KnIBB3crJJ z28LpBlACIB)Qk(4M)l`Wll@13XTvg+048WmfWJjr1XH&j9XV-#l2 z08&nH;pcQ77FY&IJZb*yk4Nw<%28o@+b`eY|dx=9x?O2MF2{VI;Y&rRp5O55YR zI8u+Z@cxhTC6eE#-Rcw1{xufwvUs1x53{(S=z36a8J2Z9z^P{JW?s|!N2=)?L`U?+vUisBT$az!oHVA z$Y7O`VtF5~K{2w-73N2Jj`WoSvU!$p!$(ht9rzJRyp}$jLb48NH>yv?KPJ8Vo$e~m zaY4DjiEOUQ^+Bp5L~2PV2peh0$!jb#D1tDwh2x^Q(+#TO1lX}jjPWYsn&Q|eP*BsQ znWT+_lpNkdkHY;m_7EikoZ=JwW3v=M##a{E2SSwCgY|6$f)eIU+E1mK@@g=_UM9&VfyM zN+~LHs8r*!`azIP9y+c$KtGOwKQddzz>6UZFri}cptrZfNYsjS{tCgB-NbI`0J=_A@^c-kAjrkzKci=JjNlGf)25Kn=Tu>D5yXq z0d@gyQCgG;=U`yoq6N0Bwd}zFN3#JoH~_Q*{6n3EK~EXfv;(kxl`M7vn;T&unZpKy z`le~H=oUc5sazhlgsYZGrI~`cj4D24hieeT$Wq2>3SH)shY$avO`Y1b1DebR+hKD% z<4{xlNrGufFrMW!-;0Ko+7e6I+%+;vYM{z@QbTg6QCpFs8+ECeoJf+Qm=Oki<3kNQf=88E+jN(%47RLS@d z^^8}*ZLlr3@rs(jF=(ZSMyI|6fFkbGJ*<+R(EUDH#g6l0LjcX6$|?R|$N0t~{?DW8 z|4kG*AEb_!_{GeSzrkL=z=C>G{+$ur=1S--+ePZ&m)JF@?x%Qrfp3;kw1fM}3%J=e z!T&bi{W~N$HHRDMAvFV#g%@*WL5QR*ZlOYYd^EB7RFsBcYHk2qq$e_`sX*nZv;_l6 
zcqWNQ)HlfheFn3b0JLD--cK0M;z9NaV}v9Qa#%QO0#g#4NtC3Y3Z~({r{>Ak`1EfW35JoYf?8io z-HQRflP^_d;R-X^zK$EJY6ZHDGO_|j6|R+HPqyRk*AxShq^}~-^);S}#WtQLw#PtK zp2`6?XPb8>)OH@L+39xqk$JL*X1ljxk00;LA*uQg09N!03X7_y{ZB_a*OMR}Sx6K4 ztLW%|lZAAr@JGh#F`hG#?+cyOE#Dwu&m z>Jw*=h5OGYn5jnm}pwQe2Zwq?*P!dpHGkfCFu$r;@FOOFYdxgYU}qIIDGbV0Xl6ZkDNr2 z@iRYrsW!QA-JePQa2gLv`YKI_2xnxcvIx8Jy{H9M;goU8U{9Q)I=^=AFSQ3@(@t=#K89c}FXvBr z33^)R7l{h0*e$-HTx)5o?F-#BU;b-`ZW!eV52lKS%$G6XC~kT&ZCh*2{{ZcFynriv zo;&C-W4!-H&c;#X2Y`Sl$xW_%nEcI4daoHk7WA}r;` zNwl1$^W3wYj>}G+lCCzflro{s)}pS`S@du9L5TD^=Th*TK=k$5lsu^aL3Z;nF+P)m zT4oy1w%`3leDwy&7?gbA=marBT4l`mS8x-HAHg!nd!UX82NX1TlP$1eq^Uvl7kLkk zFkNMNk6Scx%nT$4$+6*Km4yFb&}Qb&Da<4vhUN|qRAm(8N2aiRXc@$Y!Z$uk744{!#i4W>zX>t$}a)TYc%Ue~2*)AV`vz z8UgM0gK~q#C(m1a0bPk)ZJpcXPxR=%MJj;|SQht={1o{2Bmis^F)j&!-;%(#AOehG zvKqhqhfB#HUD@IfH0h|g{S<}y>pk0maX(p0U*p@RtX+fKs)c2K54FCq_1hR=FR?`f z$`xsF)W#{tJ98X=z_xF)_)!+8S$vBHEd)zu{8iTe8jGJ}!8ngk-{1c>i(h3yKgi$9 zLO!!}3io((oT=k2c0nTgge_m3YP*O&{|rXCOG&Pxl4UW#5Bmm?$0#9paTpnlz4&)v zWT0FrA1Duv45R%}xsqosrq)6TQWGQS4~Da>1+z9mZ`K08=myC}2{mtDk|P6epC0L# z-X5bfk{Fh((C@vmML+q*%3fv+hB2rPq{$PpsqK^TJb}=Namai_hth%vK@-2=(v7x4T!fx|70;K+h|xd#@4#4CfP zf*b50@N~gbNfd+b;Mp(Q5ugQq|DXIS6jg7n6VI;WbP>NE4qEmVB}dHn-M*+v$$m8h zIFG#S3Sj@AW3=aKDN^N}epcwH@_^2Hd>8A%rjN~f!cM0{SyjXB^9EH$R1%j>r#h2E zUNc+iY02hHQcd&zF`)PV8H-%${tMjZR90;lg8NS~%0Ys35S2uG^0Q6ToC69$N=;28 zt2oc3ViO(phum-hS2T@co02res89}-9u%f!aiKF!F+yir78g3xvbe)kj8qXi2sf#; zs4QAq8{LL}9Z?%9eKqr=GJh69T>5Slk|_U;weQKS2QX7AT~n!B{B$Nbg|0cCcY)&H z!~n17RL{(m?+M^XiNCQZwXDgy)b~HYZI81kO!Niv-^YNT{egn~7FY0lDEfl)k1)XJ zd*CeLo>w2Jd_XG7oVVpIPro>6o?)0;1ki^) zae?GRktZ#nr&%4;z|e~re%qEv$R!jx26sWf6^wEvr{DLd2#HmLh(R|5lVkiYQse$M z@N$9pB?9_KV}zPyVf$lgLmH}%mmUQyz;ak7}JRspZK?faC|ABRkdAk-@n8t zU*Xi`ds;e)*uIS0r-^O-^3}kz@VNHO z(68xJAbu!4bu^5TnA! zf{d8pEqo}buHeW=&>K`#g8@VOF!d3 zT4y~+A{Tl4aTYJ3Fpl9GEa{%HsDe{jypX7ep2+!T7c4^-TqFYbqvCXi#69cr0LYTozk{hoAoD24K8}n(hrn3+$^(aEkRr|qY+e*1y`^=*e zt?*}Ixw4gY7~e!D&Wfl3zjpY#p&H@HVLZSGFO8@fhRR+ne!8KM@SATa-q#A3@dI`? 
z{8LC~zxDH{7{vcqDD1Q}CJ$QvzrlT9#G^Xvka+ZL93r~=N+m>fPawuSQaDz!$Y$<) zt)Nq`bMl#0+Xe6Z3C6q4={W`Lgi?7l{zz4m{Wf(Io%JWP6;9#yd8rzoquSD*pN9EK zE)o~ixkQ575cy>amlABbx=_w#jGr4L*^lob22ypQ2S@P$>Vgu1PdBL_IBIl;V<;fxJKaQL(DN%Y-tqc!TuF$ML=q`)LgLT|UKS#V+Vfq4_DIV3?y6 zO*;q?-^X5@=?1KL+g{93KYA}FPxybzF1U6LIR)ffgq(mn2*tj_4_YYx3={oCI;DhS z$PU~SdG-+i4Qu5Z?AK|}^7db|__r*+$>JLi+_P5q!V^ literal 0 HcmV?d00001 diff --git a/tweepy/parsers.py b/tweepy/parsers.py new file mode 100755 index 0000000..55a5ba8 --- /dev/null +++ b/tweepy/parsers.py @@ -0,0 +1,97 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. + +from tweepy.models import ModelFactory +from tweepy.utils import import_simplejson +from tweepy.error import TweepError + + +class Parser(object): + + def parse(self, method, payload): + """ + Parse the response payload and return the result. + Returns a tuple that contains the result data and the cursors + (or None if not present). + """ + raise NotImplementedError + + def parse_error(self, payload): + """ + Parse the error message from payload. + If unable to parse the message, throw an exception + and default error message will be used. 
+ """ + raise NotImplementedError + + +class RawParser(Parser): + + def __init__(self): + pass + + def parse(self, method, payload): + return payload + + def parse_error(self, payload): + return payload + + +class JSONParser(Parser): + + payload_format = 'json' + + def __init__(self): + self.json_lib = import_simplejson() + + def parse(self, method, payload): + try: + json = self.json_lib.loads(payload) + except Exception, e: + raise TweepError('Failed to parse JSON payload: %s' % e) + + needsCursors = method.parameters.has_key('cursor') + if needsCursors and isinstance(json, dict) and 'previous_cursor' in json and 'next_cursor' in json: + cursors = json['previous_cursor'], json['next_cursor'] + return json, cursors + else: + return json + + def parse_error(self, payload): + error = self.json_lib.loads(payload) + if error.has_key('error'): + return error['error'] + else: + return error['errors'] + + +class ModelParser(JSONParser): + + def __init__(self, model_factory=None): + JSONParser.__init__(self) + self.model_factory = model_factory or ModelFactory + + def parse(self, method, payload): + try: + if method.payload_type is None: return + model = getattr(self.model_factory, method.payload_type) + except AttributeError: + raise TweepError('No model for this payload type: %s' % method.payload_type) + + json = JSONParser.parse(self, method, payload) + if isinstance(json, tuple): + json, cursors = json + else: + cursors = None + + if method.payload_list: + result = model.parse_list(method.api, json) + else: + result = model.parse(method.api, json) + + if cursors: + return result, cursors + else: + return result + diff --git a/tweepy/parsers.pyc b/tweepy/parsers.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b74926db01a96b961d2f19ca35b241798fef2ce3 GIT binary patch literal 4009 zcmcIn%WfP+6usR&Gk(OzA&*Inf@l&VO{64d!-kh2!9f8rL~X>wLK?NFYvLaFbdS1f za0WRW60zb3*a2I_pRi)dZ}0(d&h2^FAz=}lv8%dXee2e__nuoF{I$C5{r=PEU77p> 
zJl{jHKcGtR?@1;y@MPN)k9}VMIMQKwY?xo zL(GDXF3d(3C0U$~F3m=lBv}@-q-#&jMwca7k*(G$6iWZbujQfTR#1IXBqqBPcU@7A ztI-#^XfP~F*RgnJrf-Y9np(!xXZywsH_H<94Rr6tr8VX3?38;8#lD2f2#<^t;~?8U zabP2mXshL8x=Wx~_|k0@OZiUZJG|sUT`sb}qFN3oKiw^KyCy7+9TvGY;V>R&MVy3j zo?y_8%6vLM%G}M_n~_f0Fb>@aXTqA;h20`|ahlshZ-hzgVim?+x}(w-rJYG>6=m2i zaucS#FfUvGy*XH}QE2Pds>vrMp62Adfm_#9K@wl$WbD6BC01k}XEfTkYvB^nq}dR2}jl>^Q|g`BZ8(TX#tW@y5}1bpSRM*!g*$=O`1*(mZvY&MVwZ z{n7h(=<}!t{!t~UZe?9E%?X%T&2;&Y8q|h`$DLtuK<+t&1s9x@lc|qfg}@0AEp=WR zJEHqVWayK1CbGcGD3%z)nS!N2o%f@yPutJ(r)yX`^QXhiJ#(fcP_L@ft7tmqG$x9f zdl5B-^fae6lN}&rG#^aiRb+{@`Uzj}Wa*A#e*(^@VBUcA!%jz@Am*yt&uEOB0=@e9 z_vnC+Jb4(%7x@*bIbZgcrF=`ghc)qBz@&6glO~oRL*<@)2~X+Qq+e%B=ZkeYs7v{y zfY=WL0jEFm*#%hW?lrVso3}BDsdYYTvflIMp}u#3{EtY$@Ro)p)ot4l^_@7)Ofsif z+HlInZ-!T`CX^~?xizdudio|UMz%8rz9QGLh3Fc zMi8GfrPWlm8{5vF8S7MPkwRUZca3^Ml6GB-Fh3Ex5jAqOw$YnMZcJitR|!^SPLl2M z7)OKWb!=kKpb{_en%DGXUK3$E3O#9>nqvE&#?n5VNEmPS;sIyD;N$g)V?ChPPjH(yGx z{Pk6|j%gN({}4qpt9Ywbx;w%=e`0!kT}UAMjj*>UWmDCz&d>*C=o}bC`RFz9QBC^WoN#MHQNKXAz z+FjvFZr_mnypBUMmK_==+x;QnD>Rg{8WYMQoNarVbrCAkAzTl)z|>CpDsw^f%qB0VN>i1Q841xvE@PzWUH^#eEVXV* zXx1RzpvyRphw0NuOQq-ml)j=%6v_a@#UB^eV>$|z{TM~bom{mhHE$gbbHP84SYBfs zc^Ccjegn+ae)6tx1#iwnd9I4Kp69j}g^;*OeHb(if2G!LF@!ZVY(#Qbv?rrVhE)zBX2nk5be&#rBVJ1?i-l;Nfbx2UcpK{BLjEpoZ@I;34}U@P JY;&b~=^vXAH{1XK literal 0 HcmV?d00001 diff --git a/tweepy/streaming.py b/tweepy/streaming.py new file mode 100755 index 0000000..b17b4f9 --- /dev/null +++ b/tweepy/streaming.py @@ -0,0 +1,319 @@ +# Tweepy +# Copyright 2009-2010 Joshua Roesslein +# See LICENSE for details. 
+ +import logging +import httplib +from socket import timeout +from threading import Thread +from time import sleep +import ssl + +from tweepy.models import Status +from tweepy.api import API +from tweepy.error import TweepError + +from tweepy.utils import import_simplejson, urlencode_noplus +json = import_simplejson() + +STREAM_VERSION = '1.1' + + +class StreamListener(object): + + def __init__(self, api=None): + self.api = api or API() + + def on_connect(self): + """Called once connected to streaming server. + + This will be invoked once a successful response + is received from the server. Allows the listener + to perform some work prior to entering the read loop. + """ + pass + + def on_data(self, raw_data): + """Called when raw data is received from connection. + + Override this method if you wish to manually handle + the stream data. Return False to stop stream and close connection. + """ + data = json.loads(raw_data) + + if 'in_reply_to_status_id' in data: + status = Status.parse(self.api, data) + if self.on_status(status) is False: + return False + elif 'delete' in data: + delete = data['delete']['status'] + if self.on_delete(delete['id'], delete['user_id']) is False: + return False + elif 'event' in data: + status = Status.parse(self.api, data) + if self.on_event(status) is False: + return False + elif 'direct_message' in data: + status = Status.parse(self.api, data) + if self.on_direct_message(status) is False: + return False + elif 'limit' in data: + if self.on_limit(data['limit']['track']) is False: + return False + elif 'disconnect' in data: + if self.on_disconnect(data['disconnect']) is False: + return False + else: + logging.error("Unknown message type: " + str(raw_data)) + + def on_status(self, status): + """Called when a new status arrives""" + return + + def on_exception(self, exception): + """Called when an unhandled exception occurs.""" + return + + def on_delete(self, status_id, user_id): + """Called when a delete notice arrives for a status""" 
+ return + + def on_event(self, status): + """Called when a new event arrives""" + return + + def on_direct_message(self, status): + """Called when a new direct message arrives""" + return + + def on_limit(self, track): + """Called when a limitation notice arrvies""" + return + + def on_error(self, status_code): + """Called when a non-200 status code is returned""" + return False + + def on_timeout(self): + """Called when stream connection times out""" + return + + def on_disconnect(self, notice): + """Called when twitter sends a disconnect notice + + Disconnect codes are listed here: + https://dev.twitter.com/docs/streaming-apis/messages#Disconnect_messages_disconnect + """ + return + + +class Stream(object): + + host = 'stream.twitter.com' + + def __init__(self, auth, listener, **options): + self.auth = auth + self.listener = listener + self.running = False + self.timeout = options.get("timeout", 300.0) + self.retry_count = options.get("retry_count") + # values according to https://dev.twitter.com/docs/streaming-apis/connecting#Reconnecting + self.retry_time_start = options.get("retry_time", 5.0) + self.retry_420_start = options.get("retry_420", 60.0) + self.retry_time_cap = options.get("retry_time_cap", 320.0) + self.snooze_time_step = options.get("snooze_time", 0.25) + self.snooze_time_cap = options.get("snooze_time_cap", 16) + self.buffer_size = options.get("buffer_size", 1500) + if options.get("secure", True): + self.scheme = "https" + else: + self.scheme = "http" + + self.api = API() + self.headers = options.get("headers") or {} + self.parameters = None + self.body = None + self.retry_time = self.retry_time_start + self.snooze_time = self.snooze_time_step + + def _run(self): + # Authenticate + url = "%s://%s%s" % (self.scheme, self.host, self.url) + + # Connect and process the stream + error_counter = 0 + conn = None + exception = None + while self.running: + if self.retry_count is not None and error_counter > self.retry_count: + # quit if error count 
greater than retry count + break + try: + if self.scheme == "http": + conn = httplib.HTTPConnection(self.host, timeout=self.timeout) + else: + conn = httplib.HTTPSConnection(self.host, timeout=self.timeout) + self.auth.apply_auth(url, 'POST', self.headers, self.parameters) + conn.connect() + conn.request('POST', self.url, self.body, headers=self.headers) + resp = conn.getresponse() + if resp.status != 200: + if self.listener.on_error(resp.status) is False: + break + error_counter += 1 + if resp.status == 420: + self.retry_time = max(self.retry_420_start, self.retry_time) + sleep(self.retry_time) + self.retry_time = min(self.retry_time * 2, self.retry_time_cap) + else: + error_counter = 0 + self.retry_time = self.retry_time_start + self.snooze_time = self.snooze_time_step + self.listener.on_connect() + self._read_loop(resp) + except (timeout, ssl.SSLError), exc: + # If it's not time out treat it like any other exception + if isinstance(exc, ssl.SSLError) and not (exc.args and 'timed out' in str(exc.args[0])): + exception = exc + break + + if self.listener.on_timeout() == False: + break + if self.running is False: + break + conn.close() + sleep(self.snooze_time) + self.snooze_time = min(self.snooze_time + self.snooze_time_step, + self.snooze_time_cap) + except Exception, exception: + # any other exception is fatal, so kill loop + break + + # cleanup + self.running = False + if conn: + conn.close() + + if exception: + # call a handler first so that the exception can be logged. + self.listener.on_exception(exception) + raise + + def _data(self, data): + if self.listener.on_data(data) is False: + self.running = False + + def _read_loop(self, resp): + + while self.running and not resp.isclosed(): + + # Note: keep-alive newlines might be inserted before each length value. + # read until we get a digit... + c = '\n' + while c == '\n' and self.running and not resp.isclosed(): + c = resp.read(1) + delimited_string = c + + # read rest of delimiter length.. 
+ d = '' + while d != '\n' and self.running and not resp.isclosed(): + d = resp.read(1) + delimited_string += d + + # read the next twitter status object + if delimited_string.strip().isdigit(): + next_status_obj = resp.read( int(delimited_string) ) + self._data(next_status_obj) + + if resp.isclosed(): + self.on_closed(resp) + + def _start(self, async): + self.running = True + if async: + Thread(target=self._run).start() + else: + self._run() + + def on_closed(self, resp): + """ Called when the response has been closed by Twitter """ + pass + + def userstream(self, stall_warnings=False, _with=None, replies=None, + track=None, locations=None, async=False, encoding='utf8'): + self.parameters = {'delimited': 'length'} + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/user.json?delimited=length' % STREAM_VERSION + self.host='userstream.twitter.com' + if stall_warnings: + self.parameters['stall_warnings'] = stall_warnings + if _with: + self.parameters['with'] = _with + if replies: + self.parameters['replies'] = replies + if locations and len(locations) > 0: + assert len(locations) % 4 == 0 + self.parameters['locations'] = ','.join(['%.2f' % l for l in locations]) + if track: + encoded_track = [s.encode(encoding) for s in track] + self.parameters['track'] = ','.join(encoded_track) + self.body = urlencode_noplus(self.parameters) + self._start(async) + + def firehose(self, count=None, async=False): + self.parameters = {'delimited': 'length'} + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/statuses/firehose.json?delimited=length' % STREAM_VERSION + if count: + self.url += '&count=%s' % count + self._start(async) + + def retweet(self, async=False): + self.parameters = {'delimited': 'length'} + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/statuses/retweet.json?delimited=length' % STREAM_VERSION + self._start(async) + + def sample(self, count=None, 
async=False): + self.parameters = {'delimited': 'length'} + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/statuses/sample.json?delimited=length' % STREAM_VERSION + if count: + self.url += '&count=%s' % count + self._start(async) + + def filter(self, follow=None, track=None, async=False, locations=None, + count=None, stall_warnings=False, languages=None, encoding='utf8'): + self.parameters = {} + self.headers['Content-type'] = "application/x-www-form-urlencoded" + if self.running: + raise TweepError('Stream object already connected!') + self.url = '/%s/statuses/filter.json?delimited=length' % STREAM_VERSION + if follow: + encoded_follow = [s.encode(encoding) for s in follow] + self.parameters['follow'] = ','.join(encoded_follow) + if track: + encoded_track = [s.encode(encoding) for s in track] + self.parameters['track'] = ','.join(encoded_track) + if locations and len(locations) > 0: + assert len(locations) % 4 == 0 + self.parameters['locations'] = ','.join(['%.2f' % l for l in locations]) + if count: + self.parameters['count'] = count + if stall_warnings: + self.parameters['stall_warnings'] = stall_warnings + if languages: + self.parameters['language'] = ','.join(map(str, languages)) + self.body = urlencode_noplus(self.parameters) + self.parameters['delimited'] = 'length' + self._start(async) + + def disconnect(self): + if self.running is False: + return + self.running = False + diff --git a/tweepy/streaming.pyc b/tweepy/streaming.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2ab45ac96ce9185d08269dd67eac0e906c911d69 GIT binary patch literal 11699 zcmdT~PjDPpT7TU$n$gIke~u+vYsYRoQKHSpk{!r8WSw=qjzh9Z6mQE(c2&DOX-_{% zE%kJd`}J6sv0#bKzk)q;;KE_KbD$_LTLqk`EsEm60jhvKKouvbTDVfc@Atj#nUO44 zj1>+@>ZkYjd*A!s_rJUHALC=rU;h5bZB_hK@c&Ib=I>C1_;XaO)M~%(s8vV$T@|~k zQ&FoG<#1j_#Z}cAQL7`iT~%>Sb?R!hZrgw}s#Zs3?TCsSsxzim$Lv^5#Ya@9saBh| zU03nA>P)EB3ELi3@uccZsnsdfnO3XQ%F$Em0nj?C!iHLXMulTaA5&| 
z85K6m-tm6#EKjtuFoDxW|BavJ1qao<&LZmQw3pkC8ai%nWI70KdjxG0Yuz0TujE1A zGqz2#UA^(HZH#g4Hs)T>vNT(8(afh&M4fJ$B%vEDRkl24(xm&H11r=e~oX*cc} zLs%CVFXqH?B?k(f_al?*L}zXE$PYn&1CQB8p+N|oyh+kN#QjuwL{MS1szO&PBfK8= z)KplNN?nB`Qo)7Pq|#7fT`FTL9F@uu5|^t0c}yzf1$}WZ{>s8Q_HGlC^0`KgW>MUC zRJW;exrqBpwS;3yZD}Lz=%par3Znep z(p%bWG30@ce zpzM&&LhxV4uHcMKhg|Z&QM)ebf8SMGHI==ua#w9u6qshS%FYL_+O^FQb-%*-c};DN zs_dSF%1W~6fW?c&KV${XIWYU&E|vpGUEPNiRMi~+ji@=m+Y8`d$^Qj_pFDo|(F1q? zL&{9Cxb8d_^~QmV4dtv@e90lQV{(8|9H0SA_aF5drLdUamib3y{@4L?kE^_?Hbq>a zMl7L^1EK>hVFlOxIWNSr`u2uSye!!E!XOX4M|iTh)hJDdIQ~NjKo*4>EQSRgoo}R} z7p-|aX%B*6HpqsZAn9R;9d9E@!g#>-!sH^Yvg4xX>%5mG-rGTJ0FaWFb_;Ryu+(eE zDF%k|CQ>TgN)rQndrPBg_v)-}fNKoJjW6cOWyO zal;4_nYTI+*tR#Txxk1w6}6{dl;DlAJKRpC0j|loq!Os%*%JW`gL!4VG>hw z^1i5h?i8m%Xnd+Zf!GZ)qebWCRD2Mk0#VKtf|UKmS*Y1C%OXsxg2q0aizyauYH68L z;wxv5)Ae;I2zz!~DN)AIa0}d5l!E9`a7Bw;A!lI4=ZSoprb8%3T;Tv zkb}z+4|_WQO8|Erk5z=9i|9-H5ly^aV&x|E^xd}Z<`6(HZMS=wS=^7Z@JN5wVR47< zusl|7`|i*=$hmwTLvUW=UV>Y`^^!D?U{MR2doW!dY@=L6aT-FkoJUEpLy%9OOn%rS z`KJKn1A^r>tIxfTGnlmZ7~~rfy7CD7@8FAJn`g1{x9~h6o}|m8cv^kXS4RJ*e0~}j zeDvf9zxW8kqNW1j?g5Y7i4KrU>iR=70&n&^pNB6T6zi^<^N--0q{;bLE?g*;2t9Xe zGik%L4Hta6K-SZxvB-~bLu;8h|FDM;)l4Hm&HX*XF1VUPSw7d;zt_FpPkigq4jERD zF_7^P1r^*)U|*<$l^Sr6|+>>c=Ox< zeCaM2nBvmT_F?Z=6ylCpRzEBw_#TL(@mL0^6%NNJdRna{=;&5Uyp>j~lZL&RO>)F? znrNRq=~L7DFR-A!@`;bAN}qbjr)>Ka82>9QUS{E;7@#e+r;E{iV##RHFhHxh%}S%u zINPZ1{S{)Zh_l$nH2I-<_y!*H$0%@mn~_+RK;j{q2!L(e3!mS{y$FD9+=~F%#=QuD zZQMJmG(z8r!5^C#MGTDO1!CX{nSe;xWhBf|1j7ik;a%Gyq#Q6*Lg+&)Dgf0~50Uzq zRH39HkisSJ(GsF4 z92)@+J0)6Tr^HO`L6l>sNgz~AL-b-hL6>^DR$?8lMQ)%`c1jGxPNWBpl%0r2>~drX z#>-AbCAJe3nJhb-W#^R2WoGt|Dx4NnCsYEO9xduQmf(y)c&c3T48y+eh!ffVY34aN z0cjswImRYKJ#@}U9f+ZhTo3j5F_3WCN1@hPs6Wl#6os7WJljFc+Jom$Ep6LWP0!Wa#)mvLb zIAWrEI{JI~;5KH_@b+vBA__``mo@R>HxStC%q$!y4Dw6R%QwU{l<5L7Lozq(B?fsjhzsBmQd~CKfldUJ-G%H*`l! 
zsz(AG|1#&%K@@T$@_~YaCO5L|RvPa3KgStgMX^9g)*F9=wXdU)gQVgIn}G&;O65cC zm*KIHoQC|)I7eN^80GI#r|C8jkDQRvrcEE!v5K+fHLm_!7%rwSz<OMRoU-J1*%D@t%l0JTS)fih}y#Fch!wVshvn7eQZ)uTdvA} zhm2ua9a}-E5!NUK#K8v1Ng0KxqVthac8;k{h$n`iAJBwUqJ%tFl|!MOpzKKv%F0d= zb+S1nSrS?CpyJ4kY0llnq!G1=G;CdMRe+vg`;6LzNJ>Am7&DTvdD-E4puC_~E`cV+ zx`rI*js8bKkBI?~XfQAG$?c1(3fales?Bq14cVwMl|6Kw-5R!hPTj8+BhZ`K85hmX=T$oG zLKWuo97PjbGiie~R?|+wWy_JV4>o;)o01lQ@S0l=R7>)_XS;9-FA8?Wp-&5T#i8v1 zo>j+rriJp{rE`9{VVp+M7np{*8a^RvE{3{r;dWm92RToFMMBTn~^X83fHXVyV*zQh^%Knw1 z0jdkpFF}_nA?ie{c;4m!p%!@?s~7d{o;JDviyWj*fO0Iev^Ld2&12mk`t*WrC%Eg= z|FG(cr?lNsB1+P~$}y@3QDx#|Suc#K*cMZQtOb{6FcOamJi;=$mv-I#ty9f806aa$Xa) zY3GFdDppmYk*gTxoN;H$IqFQhjYr z;DB0PdmBqDSXyq9{io~<|4r`7DX)gSxomhrW1>_Ll;a9UQOMnh>>%1=Za6F$Ik4GW z_|%d0ks=dD>xdUCSQRC?AK-tPP_-);NO&tT-{&CrYl~Ne#1#|2I)nqnw<$6dif@$Y zyLgXh1N!t9bTNLUcuH@C36#IWV@?s0J6Eka^UgC)4U#ht-k+!(g->z}o=BhDd7x`t zL2n6^Ay*5xXA5NV!YAR?IQD9sM4V!KTYO`?BFw!kjwT#DC_bC}eG34s9wsULPYIuV zD_-D0AfzuaJIUd725-PRng7Clc*6=FSIwFG+&mqfQg2Uo058x=1&;R)1~26Ba>HJ= zcWcMHS!56P2jxC5|Iqii0U-Z|r^v?$SN8I?*V;T2%?k-B*Y*yG3$DgJ!Eg8~Z%^(J zQeIr3NY7`EC@Odfa3?tl0hmY=13XN<$rAn0bqHrKLvkSA*=FfUqPK2)A>TisL>6;L zZSRQ6{+TYL6-)$sbUcT9#?&l(U1k1=wiD>U-gFcf{Y>pjPzkvA$A$n*2mn|`3(!Jt!GFbh zv1Lu%6=7uh0hW>RDt#S65Pg=u_!`y0@WQq+j@smAaL5UiLGTzCrP1JopvNBoA84 zqRrA8UPi)6){kLGviS>=z<6WcSVOj#U;G^+ARHohOiIWd%k|1ixz9si68WJ-b`+}3 zOrS&IO6(QYoY@0R7jb>MtFYv@?t+k;!tuW*_e)r`hulbc;9HXXU}`@Jed&W*8KRUN znyQr00;g#bbqwtRQU8DO_LqVB9%7q-pFtf=Yl#}4jTs zMpv3XfdDIQJbVKAVC;U=`UL+qw8xRY-+?b)a9q#?WNO-8eE;)-1G8TA7XCwBWj)94 z08@Y%osNSj+G1YeCfHmSJ~@*EwUC{kEFn!Pp*}u@5xQFi&9}59EEr3L5C8bkodZGo%kh z?TeGf%z<=@6*%m%{8@ne8V|(tDanE?FOYM6p3q-n@l6)uynV`Az~UnowD|r83(03( zVC?~mUt{s>EPjK<4_R=h0lHFq%2@UyAi*V4;!)R-qiM`FuHdEKT%(TCnVFevH0yYr znK^lN;h*LH@1qd*H)(sTctIgfvHi*uZl+IKOGL$VqAxW6m|bK{`wi$K-g4@=__~U$ zT6E(@i~Zu1f?V`TR?HWtdZ@RTM_5m4TlbjnRzzk5Hyb_3i%Xx!{6YvOzVv1Le)j9c a$M97wlVDTw+k~TwUw77NxV6gcxBmw#vWG7K literal 0 HcmV?d00001 diff --git a/tweepy/utils.py b/tweepy/utils.py new file mode 100755 
# Tweepy
# Copyright 2010 Joshua Roesslein
# See LICENSE for details.

from datetime import datetime
import time
import htmlentitydefs
import re
import locale
from urllib import quote
from email.utils import parsedate


def parse_datetime(string):
    """Parse an RFC 2822 date string (Twitter's created_at format) into a datetime."""
    return datetime(*(parsedate(string)[:6]))


def parse_html_value(html):
    """Return the text between the first '>' and the last '<' of *html*."""
    return html[html.find('>')+1:html.rfind('<')]


def parse_a_href(atag):
    """Return the first double-quoted attribute value of an <a> tag (its href)."""
    start = atag.find('"') + 1
    end = atag.find('"', start)
    return atag[start:end]


def convert_to_utf8_str(arg):
    """Coerce *arg* (unicode, str or other) to a UTF-8 encoded byte string."""
    # written by Michael Norton (http://docondev.blogspot.com/)
    if isinstance(arg, unicode):
        arg = arg.encode('utf-8')
    elif not isinstance(arg, str):
        arg = str(arg)
    return arg


def import_simplejson():
    """Return the first available JSON module (simplejson, stdlib json, or GAE's)."""
    try:
        import simplejson as json
    except ImportError:
        try:
            import json  # Python 2.6+
        except ImportError:
            try:
                from django.utils import simplejson as json  # Google App Engine
            except ImportError:
                # `raise ImportError, "msg"` is Python-2-only statement syntax;
                # the call form is equivalent and valid on Python 2 and 3.
                raise ImportError("Can't load a json library")

    return json


def list_to_csv(item_list):
    """Join *item_list* into a comma-separated string; falsy input yields None."""
    if item_list:
        return ','.join([str(i) for i in item_list])


def urlencode_noplus(query):
    """URL-encode the *query* dict without turning spaces into '+' signs."""
    return '&'.join(['%s=%s' % (quote(str(k), ''), quote(str(v), '')) \
        for k, v in query.iteritems()])
z>Z9Wfg#%!jw23|uP8rB=ilNjIo%5DdzlbqrZb#Z-znIMIUhO-&ACsuYDTO8G&Bb`O zX2dl(53TT9gef4$IN-Q2a*U`>8{^jWpvNxFN0~iyW#K-dr*ZM6-_6Q6*^PIp-ObXU zs<@hH*t>VJ#IJ&UmygQIe_K_c>>V!kERp0aE>6lP+To!xKZbUp%KHchz)QTTKlhd= zI=UmKX4+`ynIj}=T6-{x zJetpsvobBZtc0NAgiXE8`&sI|u1yz$%kc6j7TCaDK7;5OVlj=-Wh~?sJX%%XgSia| zgYa79;Oz_f5ha8+L%&7haig^qBIo2x6A^3B3?sQahkt8|KGf!W;J$j`(9eKPZ@Qm> z_$l^^_+6KA3W1@EI)5(4wwh>A&Xry+6dDfLag~Mc?ib}K8@m_KP2R> z7WEs$d29=x`blDk4$;8y50fG*2XPiyoQz^u6k6jpkJBtvvVQrxiknT--6Xj~@+ryZ tBo9I6=#=|_FCr8BGB1;HW?!*(9beEkYtyT3+O=lAxzt>4wwfEw`hP6Ezsvvt literal 0 HcmV?d00001 diff --git a/twitter.ini b/twitter.ini new file mode 100644 index 0000000..1f14349 --- /dev/null +++ b/twitter.ini @@ -0,0 +1,5 @@ +[Twitter] +consumer_key = +consumer_secret = +access_key = +access_secret =