MobileBlur

Diff
Login

Differences From Artifact [9e60ae12cf]:

To Artifact [8c37485973]:


     1      1   #!/usr/bin/python
     2      2   
     3      3   """newsblur.py - An API wrapper library for newsblur.com"""
     4      4   
     5         -import cookielib
     6      5   import simplejson
     7         -import urllib
     8         -import urllib2
            6  +
            7  +import requests
     9      8   
    10      9   __author__ = 'Dananjaya Ramanayake <dananjaya86@gmail.com>, spiffytech <spiffytechgmail.com>'
    11     10   __version__ = "0.1"
    12     11   
           12  +nb_url = "http://www.newsblur.com/"
           13  +cookies = None
    13     14   
    14         -# Set up cookie handling so we can auth with the Newsblur API
    15         -cj = cookielib.LWPCookieJar()
    16         -opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    17         -urllib2.install_opener(opener)
    18         -
    19         -nb_url = "http://newsblur.com/"
    20         -
    21     17   
    22     18   def login(username,password):
    23     19       '''
    24         -    
    25     20       Login as an existing user.
    26     21       If a user has no password set, you cannot just send any old password. 
    27     22       Required parameters, username and password, must be of string type.
    28         -    
    29     23       '''
           24  +
    30     25       url = nb_url + 'api/login'
    31         -    params = urllib.urlencode({'username':username,'password':password})
    32         -    results = urllib2.urlopen(url,params).read()
    33         -    return simplejson.loads(results)
           26  +    results = requests.post(url, data={"username": username, "password": password})
           27  +    global cookies
           28  +    cookies = results.cookies
           29  +    return simplejson.loads(results.content)
    34     30   
    35     31   def logout():
    36     32       '''
    37         -    
    38     33       Logout the currently logged in user.
    39         -    
    40     34       '''
           35  +
    41     36       url = nb_url + 'api/logout'
    42         -    results = urllib2.urlopen(url).read()
    43         -    return simplejson.loads(results)
           37  +    results = requests.get(url, cookies=cookies)
           38  +    return simplejson.loads(results.content)
    44     39   
    45     40   def signup(username,password,email):
    46     41       '''
    47         -    
    48     42       Create a new user.
    49     43       All three required parameters must be of type string.
    50         -    
    51     44       '''
           45  +
    52     46       url = nb_url + 'api/signup'
    53         -    params = urllib.urlencode({'signup_username':username,'signup_password':password,'signup_email':email})
    54         -    results = urllib2.urlopen(url,params).read()
    55         -    return simplejson.loads(results)
           47  +    payload = {'signup_username':username,'signup_password':password,'signup_email':email}
           48  +    results = requests.post(url, data=payload, cookies=cookies)
           49  +    return simplejson.loads(results.content)
    56     50   
    57     51   def search_feed(address,offset=1):
    58     52       '''
    59     53       
    60     54       Retrieve information about a feed from its website or RSS address.
    61     55       Parameter address must be of type string while parameter offset must be an integer.
    62     56       Will return a feed.
    63     57       
    64     58       '''
    65         -    url = nb_url + 'rss_feeds/search_feed?%s'
    66         -    params = urllib.urlencode({'address':address,'offset':offset})
    67         -    results = urllib2.urlopen(url % params).read()
    68         -    return simplejson.loads(results)
           59  +
           60  +    url = nb_url + 'rss_feeds/search_feed'
           61  +    payload = {'address':address,'offset':offset}
            62  +    results = requests.get(url, params=payload, cookies=cookies)
           63  +    return simplejson.loads(results.content)
    69     64   
    70     65   def feeds(include_favicons=True,flat=False):
    71     66       '''
    72         -    
    73     67       Retrieve a list of feeds to which a user is actively subscribed.
    74         -        Includes the 3 unread counts (positive, neutral, negative), as well as optional favicons.
    75         -
    76         -        '''
           68  +    Includes the 3 unread counts (positive, neutral, negative), as well as optional favicons.
           69  +    '''
    77     70       
    78     71       url = nb_url + 'reader/feeds'
    79         -    params = urllib.urlencode({'include_favicons':include_favicons,'flat':flat})
    80         -#    print url + " " + url % params
    81         -    results = urllib2.urlopen(url).read()
    82         -    return simplejson.loads(results)
           72  +    payload = {'include_favicons':include_favicons,'flat':flat}
            73  +    results = requests.get(url, params=payload, cookies=cookies)
           74  +    return simplejson.loads(results.content)
    83     75   
    84     76   
    85     77   def favicons(feeds=[1,2,3]):
    86     78       '''
    87         -    
    88     79       Retrieve a list of favicons for a list of feeds. 
    89     80       Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data. 
    90     81       Useful for mobile devices, but requires a second request. 
           82  +    '''
    91     83       
    92         -    '''
    93         -    url = nb_url + 'reader/favicons?%s'
    94         -    params = urllib.urlencode({'feeds':feeds})
    95         -    results = urllib2.urlopen(url % params).read()
    96         -    return simplejson.loads(results)
           84  +    url = nb_url + 'reader/favicons'
           85  +    payload = {'feeds':feeds}
            86  +    results = requests.get(url, params=payload, cookies=cookies)
           87  +    return simplejson.loads(results.content)
    97     88       
    98     89   def id(id_no):
    99     90       '''
   100         -    
   101     91       Retrieve the original page from a single feed.
           92  +    '''
   102     93       
   103         -    '''
   104         -    url = nb_url + 'reader/page/%d' % id_no
   105         -    results = urllib2.urlopen(url).read()
   106         -    return simplejson.loads(results)
            94  +    url = nb_url + 'reader/page/%d' % id_no
           95  +    payload = {}
            96  +    results = requests.get(url, params=payload, cookies=cookies)
           97  +    return simplejson.loads(results.content)
   107     98   
   108     99   def refresh_feeds():
   109    100       '''
   110         -    
   111    101       Up-to-the-second unread counts for each active feed.
   112         -        Poll for these counts no more than once a minute.
   113         -        
   114         -        '''
          102  +    Poll for these counts no more than once a minute.
          103  +    '''
   115    104   
   116    105       url = nb_url + 'reader/refresh_feeds'
   117         -    results = urllib2.urlopen(url).read()
   118         -    return simplejson.loads(results)
          106  +    results = requests.get(url, cookies=cookies)
          107  +    return simplejson.loads(results.content)
   119    108   
   120    109   def feeds_trainer(feed_id):
   121    110       '''
   122         -    
   123         -     Retrieves all popular and known intelligence classifiers.
   124         -        Also includes user's own classifiers.
   125         -        
   126         -        '''
          111  +    Retrieves all popular and known intelligence classifiers.
          112  +    Also includes user's own classifiers.
          113  +    '''
   127    114   
   128         -    url = nb_url + 'reader/feeds_trainer?%s'
   129         -    params = urllib.urlencode({'feed_id':feed_id})
   130         -    results = urllib2.urlopen(url % params).read()
   131         -    return simplejson.loads(results)
          115  +    url = nb_url + 'reader/feeds_trainer'
          116  +    payload = {'feed_id':feed_id}
           117  +    results = requests.get(url, params=payload, cookies=cookies)
          118  +    return simplejson.loads(results.content)
   132    119   
   133    120   def statistics(id_no):
   134    121       '''
   135         -    
   136    122       If you only want a user's classifiers, use /classifiers/:id.
   137         -        Omit the feed_id to get all classifiers for all subscriptions.
   138         -        
   139         -        '''
          123  +    Omit the feed_id to get all classifiers for all subscriptions.
          124  +    '''
   140    125   
   141    126       url = nb_url + 'rss_feeds/statistics/%d' % id_no
   142         -    results = urllib2.urlopen(url).read()
   143         -    return simplejson.loads(results)
          127  +    results = requests.get(url, cookies=cookies)
          128  +    return simplejson.loads(results.content)
   144    129   
   145    130   def feed_autocomplete(term):
   146    131       '''
   147         -    
   148    132       Get a list of feeds that contain a search phrase.
   149         -        Searches by feed address, feed url, and feed title, in that order.
   150         -        Will only show sites with 2+ subscribers.
   151         -        
   152         -        '''
          133  +    Searches by feed address, feed url, and feed title, in that order.
          134  +    Will only show sites with 2+ subscribers.
          135  +    '''
          136  +
    153    137       url = nb_url + 'rss_feeds/feed_autocomplete'
   154         -    params = urllib.urlencode({'term':term})
   155         -    results = urllib2.urlopen(url % params).read()
   156         -    return simplejson.loads(results)
          138  +    payload = {'term':term}
           139  +    results = requests.get(url, params=payload, cookies=cookies)
          140  +    return simplejson.loads(results.content)
   157    141   
   158         -def feed(id=1):
          142  +def feed(id):
   159    143       '''
   160         -    
   161    144       Retrieve stories from a single feed.
   162         -    
   163    145       '''
   164         -    url = nb_url + 'reader/feed/%d' % id
   165         -    results = urllib2.urlopen(url).read()
   166         -    return simplejson.loads(results)
          146  +
          147  +    url = nb_url + 'reader/feed/%s' % id
          148  +    results = requests.get(url, cookies=cookies)
          149  +    return simplejson.loads(results.content)
   167    150   
   168    151   def starred_stories(page=1):
   169    152       '''
   170         -    
   171    153       Retrieve a user's starred stories.
          154  +    '''
   172    155       
   173         -    '''
   174         -    url = nb_url + 'reader/starred_stories?%s'
   175         -    params = urllib.urlencode({'page':page})
   176         -    results = urllib2.urlopen(url % params).read()
   177         -    return simplejson.loads(results)
          156  +    url = nb_url + 'reader/starred_stories'
          157  +    payload = {'page':page}
           158  +    results = requests.get(url, params=payload, cookies=cookies)
          159  +    return simplejson.loads(results.content)
   178    160   
   179    161   def river_stories(feeds,page=1,read_stories_count=0):
   180    162       '''
   181         -    
   182    163       Retrieve stories from a collection of feeds. This is known as the River of News.
   183         -        Stories are ordered in reverse chronological order.
   184         -        
   185         -        '''
          164  +    Stories are ordered in reverse chronological order.
          165  +    '''
   186    166   
   187         -    url = nb_url + 'reader/river_stories?%s'
   188         -    params = urllib.urlencode({'feeds':feeds,'page':page,'read_stories_count':read_stories_count})
   189         -    results = urllib2.urlopen(url % params).read()
   190         -    return simplejson.loads(results)
          167  +    url = nb_url + 'reader/river_stories'
          168  +    payload = {'feeds':feeds,'page':page,'read_stories_count':read_stories_count}
           169  +    results = requests.get(url, params=payload, cookies=cookies)
          170  +    return simplejson.loads(results.content)
   191    171   
   192    172   def mark_story_as_read(story_id,feed_id):
   193    173       '''
   194         -    
   195         -     Mark stories as read.
   196         -        Multiple story ids can be sent at once.
   197         -        Each story must be from the same feed.
   198         -        
   199         -        '''
          174  +    Mark stories as read.
          175  +    Multiple story ids can be sent at once.
          176  +    Each story must be from the same feed.
          177  +    '''
   200    178   
   201    179       url = nb_url + 'reader/mark_story_as_read'
   202         -    params = urllib.urlencode({'story_id':story_id,'feed_id':feed_id})
   203         -    results = urllib2.urlopen(url,params).read()
   204         -    return simplejson.loads(results)
          180  +    payload = {'story_id':story_id,'feed_id':feed_id}
          181  +    results = requests.post(url, data=payload, cookies=cookies)
          182  +    return simplejson.loads(results.content)
   205    183   
   206    184   def mark_story_as_starred(story_id,feed_id):
   207    185       '''
   208         -    
   209    186       Mark a story as starred (saved).
          187  +    '''
   210    188       
   211         -    '''
   212    189       url = nb_url + 'reader/mark_story_as_starred'
   213         -    params = urllib.urlencode({'story_id':story_id,'feed_id':feed_id})
   214         -    results = urllib2.urlopen(url,params).read()
   215         -    return simplejson.loads(results)
          190  +    payload = {'story_id':story_id,'feed_id':feed_id}
          191  +    results = requests.post(url, data=payload, cookies=cookies)
          192  +    return simplejson.loads(results.content)
   216    193   
   217    194   def mark_all_as_read(days=0):
   218    195       '''
   219         -    
   220    196       Mark all stories in a feed or list of feeds as read.
          197  +    '''
   221    198       
   222         -    '''
   223    199       url = nb_url + 'reader/mark_all_as_read'
   224         -    params = urllib.urlencode({'days':days})
   225         -    results = urllib2.urlopen(url,params).read()
   226         -    return simplejson.loads(results)
          200  +    payload = {'days':days}
          201  +    results = requests.post(url, data=payload, cookies=cookies)
          202  +    return simplejson.loads(results.content)
   227    203   
   228    204   def add_url(url,folder='[Top Level]'):
   229    205       '''
   230         -    
   231    206       Add a feed by its URL. 
   232    207       Can be either the RSS feed or the website itself.
          208  +    '''
   233    209       
   234         -    '''
   235    210       url = nb_url + 'reader/add_url'
   236         -    params = urllib.urlencode({'url':url,'folder':folder})
   237         -    results = urllib2.urlopen(url,params).read()
   238         -    return simplejson.loads(results)
          211  +    payload = {'url':url,'folder':folder}
          212  +    results = requests.post(url, data=payload, cookies=cookies)
          213  +    return simplejson.loads(results.content)
   239    214   
   240    215   
   241    216   def add_folder(folder,parent_folder='[Top Level]'):
   242    217       '''
   243         -    
   244    218       Add a new folder.
   245         -    
   246    219       '''
   247    220       
   248    221       url = nb_url + 'reader/add_folder'
   249         -    params = urllib.urlencode({'folder':folder,'parent_folder':parent_folder})
   250         -    results = urllib2.urlopen(url,params).read()
   251         -    return simplejson.loads(results)
          222  +    payload = {'folder':folder,'parent_folder':parent_folder}
          223  +    results = requests.post(url, data=payload, cookies=cookies)
          224  +    return simplejson.loads(results.content)
   252    225   
   253    226   def rename_feed(feed_title,feed_id):
   254    227       '''
   255         -    
   256    228       Rename a feed title. Only the current user will see the new title.
          229  +    '''
   257    230       
   258         -    '''
   259    231       url = nb_url + 'reader/rename_feed'
   260         -    params = urllib.urlencode({'feed_title':feed_title,'feed_id':feed_id})
   261         -    results = urllib2.urlopen(url,params).read()
   262         -    return simplejson.loads(results)
          232  +    payload = {'feed_title':feed_title,'feed_id':feed_id}
          233  +    results = requests.post(url, data=payload, cookies=cookies)
          234  +    return simplejson.loads(results.content)
   263    235   
   264    236   def delete_feed(feed_id,in_folder):
   265    237       '''
   266         -    
   267    238       Unsubscribe from a feed. Removes it from the folder.
   268         -        Set the in_folder parameter to remove a feed from the correct folder, in case the user is subscribed to the feed in multiple folders.
          239  +    Set the in_folder parameter to remove a feed from the correct folder, in case the user is subscribed to the feed in multiple folders.
          240  +    '''    
   269    241   
   270         -        '''    
   271    242       url = nb_url + 'reader/delete_feed'
   272         -    params = urllib.urlencode({'feed_id':feed_id,'in_folder':in_folder})
   273         -    results = urllib2.urlopen(url,params).read()
   274         -    return simplejson.loads(results)
          243  +    payload = {'feed_id':feed_id,'in_folder':in_folder}
          244  +    results = requests.post(url, data=payload, cookies=cookies)
          245  +    return simplejson.loads(results.content)
   275    246   
   276    247   def rename_folder(folder_to_rename,new_folder_name,in_folder):
   277    248       '''
   278         -    
   279    249       Rename a folder.
          250  +    '''
   280    251       
   281         -    '''
   282    252       url = nb_url + 'reader/rename_folder'
   283         -    params = urllib.urlencode({'folder_to_rename':folder_to_rename,'new_folder_name':new_folder_name,'in_folder':in_folder})
   284         -    results = urllib2.urlopen(url,params).read()
   285         -    return simplejson.loads(results)
          253  +    payload = {'folder_to_rename':folder_to_rename,'new_folder_name':new_folder_name,'in_folder':in_folder}
          254  +    results = requests.post(url, data=payload, cookies=cookies)
          255  +    return simplejson.loads(results.content)
   286    256   
   287    257   def delete_folder(folder_to_delete,in_folder,feed_id):
   288    258       '''
   289         -    
   290    259       Delete a folder and unsubscribe from all feeds inside.
          260  +    '''
   291    261       
   292         -    '''
   293    262       url = nb_url + 'reader/delete_folder'
   294         -    params = urllib.urlencode({'folder_to_delete':folder_to_delete,'in_folder':in_folder,'feed_id':feed_id})
   295         -    results = urllib2.urlopen(url,params).read()
   296         -    return simplejson.loads(results)
          263  +    payload = {'folder_to_delete':folder_to_delete,'in_folder':in_folder,'feed_id':feed_id}
          264  +    results = requests.post(url, data=payload, cookies=cookies)
          265  +    return simplejson.loads(results.content)
   297    266   
   298    267   
   299    268   def mark_feed_as_read(feed_id):
   300    269       '''
   301         -    
   302    270       Mark a list of feeds as read.
   303         -    
   304    271       '''
          272  +    
   305    273       url = nb_url + 'reader/mark_feed_as_read'
   306         -    params = urllib.urlencode({'feed_id':feed_id})
   307         -    results = urllib2.urlopen(url,params).read()
   308         -    return simplejson.loads(results)
          274  +    payload = {'feed_id':feed_id}
          275  +    results = requests.post(url, data=payload, cookies=cookies)
          276  +    return simplejson.loads(results.content)
   309    277   
   310    278   
   311    279   def save_feed_order(folders):
   312    280       '''
   313         -    
   314    281       Reorder feeds and move them around between folders.
   315         -        The entire folder structure needs to be serialized.
   316         -        
   317         -        '''
          282  +    The entire folder structure needs to be serialized.
          283  +    '''
   318    284   
   319    285       url = nb_url + 'reader/save_feed_order'
   320         -    params = urllib.urlencode({'folders':folders})
   321         -    results = urllib2.urlopen(url,params).read()
   322         -    return simplejson.loads(results)
          286  +    payload = {'folders':folders}
          287  +    results = requests.post(url, data=payload, cookies=cookies)
          288  +    return simplejson.loads(results.content)
   323    289   
   324    290   
   325    291   def classifier(id_no):
   326    292       '''
   327         -    
   328         -        Get the intelligence classifiers for a user's site.
   329         -        Only includes the user's own classifiers. 
   330         -        Use /reader/feeds_trainer for popular classifiers.
   331         -        
   332         -        '''
          293  +    Get the intelligence classifiers for a user's site.
          294  +    Only includes the user's own classifiers. 
          295  +    Use /reader/feeds_trainer for popular classifiers.
          296  +    '''
   333    297   
   334    298       url = nb_url + 'classifier/%d' % id_no
   335         -    results = urllib2.urlopen(url).read()
   336         -    return simplejson.loads(results)
          299  +    results = requests.get(url)
          300  +    return simplejson.loads(results.content)
   337    301   
   338    302   
   339    303   def classifier_save(like_type,dislike_type,remove_like_type,remove_dislike_type):
   340    304       '''
   341         -    
   342    305       Save intelligence classifiers (tags, titles, authors, and the feed) for a feed.
          306  +    '''
   343    307       
   344         -        '''
   345    308       url = nb_url + 'classifier/save'
   346         -    params = urllib.urlencode({'like_[TYPE]':like_type,
          309  +    payload = {'like_[TYPE]':like_type,
   347    310                      'dislike_[TYPE]':dislike_type,
   348    311                       'remove_like_[TYPE]':remove_like_type,
   349         -                   'remove_dislike_[TYPE]':remove_dislike_type})
   350         -    results = urllib2.urlopen(url,params).read()
   351         -    return simplejson.loads(results)
          312  +                   'remove_dislike_[TYPE]':remove_dislike_type}
          313  +    results = requests.post(url, data=payload, cookies=cookies)
          314  +    return simplejson.loads(results.content)
   352    315   
   353    316   
   354    317   def opml_export():
   355    318       '''
   356         -    
   357    319       Download a backup of feeds and folders as an OPML file.
   358         -        Contains folders and feeds in XML; useful for importing in another RSS reader.
   359         -        
   360         -        '''
          320  +    Contains folders and feeds in XML; useful for importing in another RSS reader.
          321  +    '''
          322  +    
   361    323       url = nb_url + 'import/opml_export'
   362         -    results = urllib2.urlopen(url).read()
   363         -    return simplejson.loads(results)
          324  +    results = requests.get(url)
          325  +    return simplejson.loads(results.content)
   364    326   
   365    327   
   366    328   
   367    329   def opml_upload(opml_file):
   368    330       '''
   369         -    
   370    331       Upload an OPML file.
   371         -    
   372    332       '''
          333  +    
   373    334       url = nb_url + 'import/opml_upload'
   374    335       f = open(opml_file)
   375         -    params = urllib.urlencode({'file':f})
          336  +    payload = {'file':f}
   376    337       f.close()
   377         -    results = urllib2.urlopen(url,params).read()
   378         -    return simplejson.loads(results)
          338  +    results = requests.post(url, data=payload, cookies=cookies)
          339  +    return simplejson.loads(results.content)