2015-11-23: Resync svn with production site.

David Blume committed on 2018-01-20 20:30:31
Showing 1 changed file, with 31 additions and 24 deletions.

... ...
@@ -108,9 +108,10 @@ def asciiize( s ):
     except exceptions.AttributeError, e:
         return s
 
+
 def sendEmail( subject, message, toaddrs, fromaddr='"techcrunch.py" <techcrunch@techcrunch.dlma.com>' ):
     """Sends Email"""
-    smtp = smtplib.SMTP( 'localhost' )
+    smtp = smtplib.SMTP( 'localhost', port=587 )
     smtp.login( user, passw )
     smtp.sendmail( fromaddr, \
                    toaddrs, \
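
Note on the sendEmail() change above: port 587 is the authenticated submission port rather than a local relay, which fits the existing smtp.login( user, passw ) call on the next line. A minimal sketch of that pattern, with the message framing as a placeholder and a starttls() call that many submission servers require but that this commit does not add:

    # Sketch only, not the script's sendEmail(): authenticated submission on port 587.
    import smtplib

    def send_email_sketch( subject, message, toaddrs, fromaddr, user, passw ):
        smtp = smtplib.SMTP( 'localhost', port=587 )
        smtp.starttls()                # many port-587 servers require TLS before login
        smtp.login( user, passw )
        body = 'Subject: %s\r\nFrom: %s\r\nTo: %s\r\n\r\n%s' % \
               ( subject, fromaddr, ', '.join( toaddrs ), message )
        smtp.sendmail( fromaddr, toaddrs, body )
        smtp.quit()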
... ...
@@ -254,8 +259,8 @@ def process_feed( yaml_items ):
                 if hasattr( feed, 'bozo_exception' ) and isinstance( feed.bozo_exception, xml.sax._exceptions.SAXParseException ):
                     print "Didn't pickle TechCrunch feed because it had a bozo_exception: %s" % ( str( feed.bozo_exception ) )
                 else:
-                    f = file( os.path.join( localdir, 'techcrunch_feed.pickle' ), 'wb' )
                     try:
+                        with open( os.path.join( localdir, 'techcrunch_feed.pickle' ), 'wb' ) as f:
                             pickle.dump( feed, f )
                     except( pickle.PicklingError, exceptions.TypeError ), e:
                         print "An error occurred while pickling the feed: %s." % \
... ...
@@ -263,7 +268,6 @@ def process_feed( yaml_items ):
                                 str(e) )
                         traceback.print_exc( 3, file = sys.stdout )
                         feed_is_modified = False
-                    f.close()
 
             for i in reversed( feed.entries ):
                 process_item( i, yaml_items )
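
Note on the pickling change above: opening the file with a with statement inside the try block closes techcrunch_feed.pickle whether pickle.dump() succeeds or raises, which is what makes the explicit f.close() removable. Reduced to its shape (the function and path below are illustrative, not the script's):

    # Shape of the change only: the context manager closes the file on success
    # and on failure, so no separate f.close() is needed.
    import pickle

    def save_pickle_sketch( obj, path ):
        try:
            with open( path, 'wb' ) as f:
                pickle.dump( obj, f )
        except ( pickle.PicklingError, TypeError ), e:
            print "An error occurred while pickling %s: %s" % ( path, e )
            return False
        return True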
... ...
@@ -490,7 +500,13 @@ def Get_num_disqus_comments( url_string, disqus_id, cookie ):
         print "Get_num_disqus_comments found no disqus tag for", url_string
     return -1
 
+
 def Get_num_retweets_unused( yaml_item ):
+    """ TODO: Support for retweets has been removed.
+    See: https://twittercommunity.com/t/a-new-design-for-tweet-and-follow-buttons/52791
+    So instead, use facebook.
+    curl https://graph.facebook.com/fql?q=SELECT%20total_count,comment_count,like_count,share_count%20FROM%20link_stat%20WHERE%20url=%27http://techcrunch.com/2015/11/22/the-real-reason-on-demand-startups-are-reclassifying-workers/?ncid=rss%27
+    """
     url_string = yaml_item['link']
     try:
         f = urllib2.urlopen( 'http://api.tweetmeme.com/button.js?url=%s' % ( url_string ) )
... ...
@@ -520,7 +536,13 @@ def Get_num_retweets_unused( yaml_item ):
                           )
     return -1
 
+
 def Get_num_retweets( yaml_item ):
+    """ TODO: Support for retweets has been removed.
+    See: https://twittercommunity.com/t/a-new-design-for-tweet-and-follow-buttons/52791
+    So instead, use facebook.
+    curl https://graph.facebook.com/fql?q=SELECT%20total_count,comment_count,like_count,share_count%20FROM%20link_stat%20WHERE%20url=%27http://techcrunch.com/2015/11/22/the-real-reason-on-demand-startups-are-reclassifying-workers/?ncid=rss%27
+    """
     url_string = yaml_item['link']
     try:
         f = urllib2.urlopen( 'http://urls.api.twitter.com/1/urls/count.json?url=%s&callback=twttr.receiveCount' % \
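
Note on the two TODO docstrings above: both retweet helpers now point at the same Facebook FQL link_stat query. A rough urllib2/json translation of that curl command is sketched below; Facebook has since retired FQL, so this documents the old query rather than a working replacement:

    # Sketch of the docstring's curl command, translated to urllib2 and json.
    import json
    import urllib
    import urllib2

    def get_fb_link_stats_sketch( url_string ):
        fql = "SELECT total_count,comment_count,like_count,share_count " \
              "FROM link_stat WHERE url='%s'" % url_string
        f = urllib2.urlopen( 'https://graph.facebook.com/fql?' + urllib.urlencode( { 'q': fql } ) )
        try:
            return json.load( f )['data'][0]   # e.g. {'share_count': 123, ...}
        finally:
            f.close()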
... ...
@@ -591,17 +614,16 @@ def Save_image( url_string, file_path ):
         f.close()
     except (urllib2.URLError, httplib.BadStatusLine), e:
         if hasattr( e, 'reason' ): # URLError
-            print "Save_image got an error attempting to create", file_path, "Reason:", e.reason
+            print "Save_image: Error attempting to create", file_path[file_path.rfind('/')+1:], "Reason:", e.reason
         elif hasattr( e, 'code' ): # URLError
-            print "Save_image got an error attempting to create", file_path, "Code:", e.code
+            print "Save_image: Error attempting to create", file_path[file_path.rfind('/')+1:], "Code:", e.code
         else:
-            print "Save_image got an error from urlopen", e
+            print "Save_image: Error from urlopen", e
         return url_string
 
     if len( data ) > 50:
-        f = open( file_path, 'wb' )
+        with open( file_path, 'wb' ) as f:
             f.write( data )
-        f.close()
         return 'cache/' + os.path.basename( file_path )
     return url_string
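
Note on the Save_image() messages above: the slice file_path[file_path.rfind('/')+1:] trims the path down to everything after the last '/'; os.path.basename(), which the function already uses in its return value, gives the same result for these cache paths:

    # Equivalence check for the slice used in the new error messages.
    import os

    file_path = 'cache/example.jpg'   # illustrative value
    assert file_path[file_path.rfind('/') + 1:] == os.path.basename( file_path )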
 
... ...
@@ -665,8 +688,9 @@ def Make_index_html( yaml_items, weekend_stats, weekday_stats ):
     for fname in files_to_delete:
         os.unlink( fname )
 
+
 def Make_feed_file( yaml_items ):
-    f = open( os.path.join( localdir, 'rss_feed.xml' ), 'wb' )
+    with open( os.path.join( localdir, 'rss_feed.xml' ), 'wb' ) as f:
         f.write( "<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>\n<rss version=\"2.0\">\n<channel>\n<title>Trending at TechCrunch</title><link>http://techcrunch.dlma.com</link>" )
         f.write( "<pubDate>%s</pubDate><description>Automatically Generated Feed</description><language>en-us</language>\n" % ( time.strftime( "%a, %d %b %Y %H:%M:%S +0000", time.gmtime() ) ) )
         count = 0
... ...
@@ -681,7 +705,7 @@ def Make_feed_file( yaml_items ):
                 if count > 14:
                     break
         f.write( "</channel></rss>" )
-    f.close()
+
 
 if __name__=='__main__':
     start_time = time.time()
... ...
@@ -715,14 +739,13 @@ if __name__=='__main__':
         #
         yaml_fullpath = os.path.join( localdir, 'techcrunch.yaml' )
         if os.path.exists( yaml_fullpath ):
-            f = file( yaml_fullpath, 'rb' )
+            with open( yaml_fullpath, 'rb' ) as f:
                 items = yaml.load( f )
 
                 # Do any dictionary item updating that might be necessary
 #                for item in items:
 #                    if not item.has_key( 'fb_shares' ):
 #                        item['fb_shares'] = []
-            f.close()
         else:
             print "could not open", yaml_fullpath
             items = []
... ...
@@ -771,16 +794,16 @@ if __name__=='__main__':
 
             # For the one file we really use, write to a file on the side, then move it.
             yaml_newfile_fullpath = os.path.join( localdir, 'techcrunch_temp_writable.yaml' )
-            f = file( yaml_newfile_fullpath, 'wb' )
+            with open( yaml_newfile_fullpath, 'wb' ) as f:
                 yaml.dump( items, f, width=120 )
-            f.close()
+            try:
                 os.rename( yaml_newfile_fullpath, yaml_fullpath )
-            f = file( os.path.join( localdir, 'techcrunch_text.yaml' ), 'w' )
+            except OSError as e:
+                print "The source file was", yaml_newfile_fullpath, "and exists =", os.path.isfile(yaml_newfile_fullpath)
+            with open( os.path.join( localdir, 'techcrunch_text.yaml' ), 'w' ) as f:
                 yaml.dump( items, f, width=120 )
-            f.close()
-            f = codecs.open( os.path.join( localdir, 'techcrunch_unicode.yaml' ), 'w', 'utf-8' )
+            with codecs.open( os.path.join( localdir, 'techcrunch_unicode.yaml' ), 'w', 'utf-8' ) as f:
                 yaml.dump( items, f, encoding='utf-8', width=120 )
-            f.close()
 
             Make_feed_file( items )
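
Note on the rename handling above: writing to techcrunch_temp_writable.yaml and then calling os.rename() is what keeps techcrunch.yaml from being read half-written. On POSIX the rename atomically replaces an existing destination, while on Windows it raises OSError when the destination exists, which is presumably the failure the new except clause reports. A minimal sketch of the pattern, with placeholder file names:

    # Sketch of write-to-a-side-file-then-rename; file names are placeholders.
    import os
    import yaml

    def save_yaml_sketch( items, final_path='data.yaml' ):
        temp_path = final_path + '.tmp'
        with open( temp_path, 'wb' ) as f:
            yaml.dump( items, f, width=120 )
        try:
            os.rename( temp_path, final_path )   # atomic replace on POSIX
        except OSError, e:
            print "os.rename failed:", e, "- temp file left at", temp_path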
 
... ...
@@ -807,17 +830,13 @@ if __name__=='__main__':
 
     # Finally, let's save this to a statistics page
     if os.path.exists( os.path.join( localdir, 'stats.txt' ) ):
-        f = open( os.path.join( localdir, 'stats.txt' ))
-        try:
+        with open( os.path.join( localdir, 'stats.txt' )) as f:
             lines = f.readlines()
-        finally:
-            f.close()
     else:
         lines = []
     lines = lines[:168] # Just keep the past week's worth
     # status = len( message.strip() ) and message.strip().replace( '\n', ' - ' ) or "OK"
     status = len( message.strip() ) and '\n                       '.join( message.splitlines() ) or "OK"
     lines.insert( 0, "%s %3.0fs %s\n" % ( time.strftime('%Y-%m-%d, %H:%M', time.localtime()), time.time() - start_time, status ))
-    f = open( os.path.join( localdir,'stats.txt' ), 'w' )
+    with open( os.path.join( localdir,'stats.txt' ), 'w' ) as f:
         f.writelines( lines )
-    f.close()