Skip to content
This repository
Browse code

Merge pull request #55 from lhagan/40-python-api-improvements

Easier interface for multiple items (stories, feeds) in Python API
  • Loading branch information...
commit 332d85db91ec45cab70a667646eda2b3860229b7 2 parents c55853b + 04e4075
Samuel Clay authored May 24, 2012

Showing 1 changed file with 25 additions and 19 deletions. Show diff stats Hide diff stats

  1. 44  api/newsblur.py
44  api/newsblur.py
... ...
@@ -1,5 +1,6 @@
1 1
 # Original API work by Dananjaya Ramanayake <dananjaya86@gmail.com>
2 2
 # Retooled by Samuel Clay, August 2011
  3
+# Modified by Luke Hagan, 2011-11-05
3 4
 
4 5
 import urllib, urllib2
5 6
 import cookielib
def favicons(self, feeds=None):
    '''
    Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data.
    Useful for mobile devices, but requires a second request.

    Returns a list of ('feeds', feed_id) pairs so the repeated 'feeds' key
    survives url-encoding (a dict would collapse it to a single key).
    Presumably consumed by the @request decorator — confirm against it.
    '''
    # Bug fix: the default is feeds=None, but iterating None raises
    # TypeError. Treat None (and any falsy value) as "no feeds".
    data = []
    for feed in (feeds or []):
        data.append(("feeds", feed))
    return data
101 103
 
102 104
     @request()
103 105
     def page(self, feed_id):
@@ -166,32 +168,34 @@ def starred_stories(self, page=1):
166 168
             'page': page,
167 169
         }
168 170
 
169  
-    @request('rewader/river_stories')
  171
+    @request('reader/river_stories')
170 172
     def river_stories(self, feeds, page=1, read_stories_count=0):
171 173
         '''
172 174
         Retrieve stories from a collection of feeds. This is known as the River of News.
173 175
         Stories are ordered in reverse chronological order.
174 176
         `read_stories_count` is the number of stories that have been read in this
175 177
         continuation, so NewsBlur can efficiently skip those stories when retrieving
176  
-        new stories.
  178
+        new stories. Takes an array of feed ids.
177 179
         '''
178  
-        return {
179  
-            'feeds': feeds,
180  
-            'page': page,
181  
-            'read_stories_count': read_stories_count,
182  
-        }
  180
+        
  181
+        data = [ ('page', page), ('read_stories_count', read_stories_count) ]
  182
+        for feed in feeds:
  183
+            data.append( ("feeds", feed) )
  184
+        return data
183 185
     
184 186
     @request('reader/mark_story_as_read')
185  
-    def mark_story_as_read(self, feed_id, story_id):
  187
def mark_story_as_read(self, feed_id, story_ids):
    '''
    Mark stories as read.
    Multiple story ids can be sent at once.
    Each story must be from the same feed.
    Takes an array of story ids.
    '''
    # One ('story_id', id) pair per story: the repeated key survives
    # url-encoding, which a plain dict would not allow.
    params = [('feed_id', feed_id)]
    params.extend(('story_id', story_id) for story_id in story_ids)
    return params
195 199
 
196 200
     @request('reader/mark_story_as_starred')
197 201
     def mark_story_as_starred(self, feed_id, story_id):
@@ -277,13 +281,15 @@ def delete_folder(self, folder_to_delete, in_folder):
277 281
         }
278 282
     
279 283
     @request('reader/mark_feed_as_read')
280  
-    def mark_feed_as_read(self, feed_id):
  284
def mark_feed_as_read(self, feed_ids):
    '''
    Mark a list of feeds as read.
    Takes an array of feeds.
    '''
    # Emit one ('feed_id', id) pair per feed so the key repeats when
    # url-encoded; a dict would collapse all feeds into one entry.
    return [('feed_id', feed_id) for feed_id in feed_ids]
287 293
 
288 294
     @request('reader/save_feed_order')
289 295
     def save_feed_order(self, folders):

0 notes on commit 332d85d

Please sign in to comment.
Something went wrong with that request. Please try again.