webcheck commit: r422 - webcheck/plugins



Author: arthur
Date: Thu Aug  4 22:26:43 2011
New Revision: 422
URL: http://arthurdejong.org/viewvc/webcheck?view=rev&revision=422

Log:
small style updates to SQLAlchemy constructs

Modified:
   webcheck/plugins/badlinks.py
   webcheck/plugins/external.py
   webcheck/plugins/images.py
   webcheck/plugins/new.py
   webcheck/plugins/notchkd.py
   webcheck/plugins/notitles.py
   webcheck/plugins/old.py
   webcheck/plugins/problems.py
   webcheck/plugins/sitemap.py
   webcheck/plugins/urllist.py

Modified: webcheck/plugins/badlinks.py
==============================================================================
--- webcheck/plugins/badlinks.py        Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/badlinks.py        Thu Aug  4 22:26:43 2011        (r422)
@@ -35,7 +35,7 @@
 def generate(site):
     """Present the list of bad links to the given file descriptor."""
     # find all links with link problems
-    links = site.links.filter(db.Link.linkproblems.any()).order_by('url')
+    links = site.links.filter(db.Link.linkproblems.any()).order_by(db.Link.url)
     # present results
     fp = plugins.open_html(plugins.badlinks, site)
     if not links:
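
For context on the order_by() change: passing the mapped column attribute instead of a bare string makes the ordering explicit and typo-safe. A minimal sketch of the two spellings, using an illustrative declarative model rather than webcheck's actual db module:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class Link(Base):
        __tablename__ = 'links'
        id = Column(Integer, primary_key=True)
        url = Column(String)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    # old style: a bare string naming the column
    #   session.query(Link).order_by('url')
    # new style: the mapped column attribute; a typo raises an
    # AttributeError instead of producing broken SQL text
    links = session.query(Link).order_by(Link.url).all()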

Modified: webcheck/plugins/external.py
==============================================================================
--- webcheck/plugins/external.py        Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/external.py        Thu Aug  4 22:26:43 2011        (r422)
@@ -35,7 +35,7 @@
 def generate(site):
     """Generate the list of external links to the given file descriptor."""
     # get all external links
-    links = site.links.filter(db.Link.is_internal != True).order_by('url')
+    links = site.links.filter(db.Link.is_internal != True).order_by(db.Link.url)
     # present results
     fp = plugins.open_html(plugins.external, site)
     if not links:

Modified: webcheck/plugins/images.py
==============================================================================
--- webcheck/plugins/images.py  Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/images.py  Thu Aug  4 22:26:43 2011        (r422)
@@ -29,7 +29,6 @@
 __outputfile__ = 'images.html'
 
 import re
-from sqlalchemy.sql.expression import or_
 
 import db
 import plugins
@@ -38,9 +37,9 @@
 def generate(site):
     """Output a list of images to the given file descriptor."""
     # get non-page images that have an image/* mimetype
-    links = site.links.filter(or_(db.Link.is_page != True, db.Link.is_page == None))
+    links = site.links.filter((db.Link.is_page != True) | (db.Link.is_page == None))
     links = links.filter(db.Link.mimetype.startswith('image/'))
-    links = links.order_by('url')
+    links = links.order_by(db.Link.url)
     # present results
     fp = plugins.open_html(plugins.images, site)
     if not links:
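
The or_() removal above relies on SQLAlchemy overloading Python's bitwise operators on column expressions. A small illustrative sketch (hypothetical model, not webcheck's db.Link) of the two equivalent spellings and why the extra parentheses are needed:

    from sqlalchemy import Boolean, Column, Integer, or_
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Link(Base):
        __tablename__ = 'links'
        id = Column(Integer, primary_key=True)
        is_page = Column(Boolean)

    # both spell the same condition: is_page != true OR is_page IS NULL
    a = or_(Link.is_page != True, Link.is_page == None)
    b = (Link.is_page != True) | (Link.is_page == None)
    # the parentheses around each comparison are required because
    # Python's | binds more tightly than != and ==
    print(a)
    print(b)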

Modified: webcheck/plugins/new.py
==============================================================================
--- webcheck/plugins/new.py     Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/new.py     Thu Aug  4 22:26:43 2011        (r422)
@@ -43,7 +43,7 @@
     newtime = time.time() - SECS_PER_DAY * config.REPORT_WHATSNEW_URL_AGE
     # get all internal pages that are new
     links = site.links.filter_by(is_page=True, is_internal=True)
-    links = links.filter(db.Link.mtime > newtime).order_by('-mtime')
+    links = links.filter(db.Link.mtime > newtime).order_by(db.Link.mtime.desc())
     # present results
     fp = plugins.open_html(plugins.new, site)
     if not links:
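
On the '-mtime' change: the old string form went through to ORDER BY as raw text, presumably sorting on the negated timestamp to get a newest-first order; the column form states the descending sort directly. A short sketch with an illustrative model:

    from sqlalchemy import Column, Float, Integer
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Link(Base):
        __tablename__ = 'links'
        id = Column(Integer, primary_key=True)
        mtime = Column(Float)   # time.time()-style timestamp

    # old style: order_by('-mtime')
    # new style: explicit, dialect-neutral descending sort
    print(Link.mtime.desc())    # renders as "links.mtime DESC"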

Modified: webcheck/plugins/notchkd.py
==============================================================================
--- webcheck/plugins/notchkd.py Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/notchkd.py Thu Aug  4 22:26:43 2011        (r422)
@@ -35,7 +35,7 @@
 def generate(site):
     """Output the list of not checked pages to the given file descriptor."""
     # get all yanked urls
-    links = site.links.filter(db.Link.yanked != None).order_by('url')
+    links = site.links.filter(db.Link.yanked != None).order_by(db.Link.url)
     # present results
     fp = plugins.open_html(plugins.notchkd, site)
     if not links:
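
A side note on the db.Link.yanked != None comparison in the unchanged line above (the same pattern as is_internal != True elsewhere): it is deliberate. Python's "is not None" cannot be overloaded, so SQLAlchemy hooks == and != and turns None into NULL tests. Sketch with an illustrative model:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Link(Base):
        __tablename__ = 'links'
        id = Column(Integer, primary_key=True)
        yanked = Column(String)

    print(Link.yanked != None)   # renders as "links.yanked IS NOT NULL"
    print(Link.yanked == None)   # renders as "links.yanked IS NULL"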

Modified: webcheck/plugins/notitles.py
==============================================================================
--- webcheck/plugins/notitles.py        Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/notitles.py        Thu Aug  4 22:26:43 2011        (r422)
@@ -40,7 +40,7 @@
     # get all internal pages without a title
     links = site.links.filter_by(is_page=True, is_internal=True)
     links = links.filter(or_(char_length(db.Link.title) == 0,
-                             db.Link.title ==None)).order_by('url')
+                             db.Link.title ==None)).order_by(db.Link.url)
     # present results
     fp = plugins.open_html(plugins.notitles, site)
     if not links:

Modified: webcheck/plugins/old.py
==============================================================================
--- webcheck/plugins/old.py     Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/old.py     Thu Aug  4 22:26:43 2011        (r422)
@@ -43,7 +43,7 @@
     oldtime = time.time() - SECS_PER_DAY * config.REPORT_WHATSOLD_URL_AGE
     # get all internal pages that are old
     links = site.links.filter_by(is_page=True, is_internal=True)
-    links = links.filter(db.Link.mtime < oldtime).order_by('mtime').all()
+    links = links.filter(db.Link.mtime < oldtime).order_by(db.Link.mtime).all()
     # present results
     fp = plugins.open_html(plugins.old, site)
     if not links:

Modified: webcheck/plugins/problems.py
==============================================================================
--- webcheck/plugins/problems.py        Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/problems.py        Thu Aug  4 22:26:43 2011        (r422)
@@ -53,7 +53,7 @@
     problem_db = {}
     # get internal links with page problems
     links = site.links.filter_by(is_internal=True)
-    links = links.filter(db.Link.pageproblems.any()).order_by('url')
+    links = links.filter(db.Link.pageproblems.any()).order_by(db.Link.url)
     for link in links:
         # make a normal name for the author
         if link.author:

Modified: webcheck/plugins/sitemap.py
==============================================================================
--- webcheck/plugins/sitemap.py Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/sitemap.py Thu Aug  4 22:26:43 2011        (r422)
@@ -28,7 +28,6 @@
 __author__ = 'Arthur de Jong'
 __outputfile__ = 'index.html'
 
-from sqlalchemy.sql.expression import or_
 from sqlalchemy.orm.session import object_session
 
 import config
@@ -44,8 +43,7 @@
     qry = links.filter(db.Link.linked_from.contains(link))
     qry = qry.filter(db.Link.is_internal == True)
     if link.depth:
-        qry = qry.filter(or_(db.Link.depth > link.depth, db.Link.depth == None))
-    #qry = qry.filter(~db.Link.id.in_(explored))
+        qry = qry.filter((db.Link.depth > link.depth) | (db.Link.depth == None))
     # follow redirects
     children.update(y
                     for y in (x.follow_link() for x in qry)

Modified: webcheck/plugins/urllist.py
==============================================================================
--- webcheck/plugins/urllist.py Thu Aug  4 21:46:26 2011        (r421)
+++ webcheck/plugins/urllist.py Thu Aug  4 22:26:43 2011        (r422)
@@ -26,6 +26,7 @@
 __author__ = 'Arthur de Jong'
 __outputfile__ = 'urllist.html'
 
+import db
 import plugins
 
 
@@ -39,7 +40,7 @@
       '    non-examined urls.\n'
       '   </p>\n'
       '   <ol>\n' )
-    links = site.links.order_by('url')
+    links = site.links.order_by(db.Link.url)
     for link in links:
         fp.write('    <li>' + plugins.make_link(link, link.url) + '</li>\n')
     fp.write(