Automatic background changer in Python 2.7.3 not working, even though it should



I'm new to Ubuntu/Python/Bash/Gnome, so I still think it's entirely possible I'm doing something wrong, but I've had no luck for three days now...

Here's what the script is supposed to do:
* [✓] download one random image from wallbase.cc
* [✓] save it to the same directory the script is run from
* [x] set it as the wallpaper

There are two attempts at setting the wallpaper, each using a different command, and neither works from within the script. There is a print statement (second line from the bottom) that spits out the correct terminal command: I can copy & paste the printed result into a terminal and it runs fine, it just doesn't work when executed from the script.

#!/usr/bin/env python
import urllib2
import os
from gi.repository import Gio
response = urllib2.urlopen("http://wallbase.cc/random/12/eqeq/1366x768/0.000/100/32")
page_source = response.read()
thlink_pos = page_source.find("ico-X")
address_start = (page_source.find('href="', thlink_pos) + 6)
address_end = page_source.find('"', address_start + 1)
response = urllib2.urlopen(page_source[address_start:address_end])
page_source = response.read()
bigwall_pos = page_source.find("bigwall")
address_start = (page_source.find('src="', bigwall_pos) + 5)
address_end = page_source.find('"', address_start + 1)
address = page_source[address_start:address_end]
slash_pos = address.rfind("/") + 1
pic_name = address[slash_pos:]
bashCommand = "wget " + page_source[address_start:address_end]
os.system(bashCommand)
print "Does my new image exists?", os.path.exists(os.getcwd() + "/" + pic_name)
#attempt 1
settings = Gio.Settings.new("org.gnome.desktop.background")
settings.set_string("picture-uri", "file://" + os.getcwd() + "/" + pic_name)
settings.apply()
#attempt 2
bashCommand = "gsettings set org.gnome.desktop.background picture-uri file://" + os.getcwd() + "/" + pic_name
print bashCommand
os.system(bashCommand)
settings.apply()

You are changing the setting successfully, but it still isn't applied. Try calling:

settings.apply()

设置"picture-uri"字符串后。

It works for me (Ubuntu 12.04).
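
For reference, here is the fix in isolation, as a minimal sketch (the file name below is a placeholder, not taken from your script):

#!/usr/bin/env python
import os
from gi.repository import Gio

image_path = os.path.abspath("wallpaper.jpg")  # placeholder file name
settings = Gio.Settings.new("org.gnome.desktop.background")
settings.set_string("picture-uri", "file://" + image_path)
settings.apply()  # apply the pending change after setting the key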

I've modified your script (the changes are unrelated to your error):

#!/usr/bin/python
"""Set desktop background using random images from http://wallbase.cc
It uses `gi.repository.Gio.Settings` to set the background.
"""
import functools
import itertools
import logging
import os
import posixpath
import random
import re
import sys
import time
import urllib
import urllib2
import urlparse
from collections import namedtuple
from bs4 import BeautifulSoup  # $ sudo apt-get install python-bs4
from gi.repository.Gio import Settings  # pylint: disable=F0401,E0611
DEFAULT_IMAGE_DIR = os.path.expanduser('~/Pictures/backgrounds')
HTMLPAGE_SIZE_MAX = 1 << 20  # bytes
TIMEOUT_MIN = 300  # seconds
TIMEOUT_DELTA = 30  # jitter
# "Anime/Manga", "Wallpapers/General", "High Resolution Images"
CATEGORY_W, CATEGORY_WG, CATEGORY_HR = range(1, 4)
PURITY_SFW, PURITY_SKETCHY, PURITY_NSFW, PURITY_DEFAULT = 4, 2, 1, 0
DAY_IN_SECONDS = 86400

UrlRetreiveResult = namedtuple('UrlRetreiveResult', "path headers")

def set_background(image_path, check_exist=True):
    """Change desktop background to image pointed by `image_path`.
    """
    if check_exist:  # make sure we can read it (at this time)
        with open(image_path, 'rb') as f:
            f.read(1)
    # prepare uri
    path = os.path.abspath(image_path)
    if isinstance(path, unicode):  # quote() doesn't like unicode
        path = path.encode('utf-8')
    uri = 'file://' + urllib.quote(path)
    # change background
    bg_setting = Settings.new('org.gnome.desktop.background')
    bg_setting.set_string('picture-uri', uri)
    bg_setting.apply()

def url2filename(url):
    """Return basename corresponding to url.
    >>> url2filename('http://example.com/path/to/file?opt=1')
    'file'
    """
    urlpath = urlparse.urlsplit(url).path  # pylint: disable=E1103
    basename = posixpath.basename(urllib.unquote(urlpath))
    if os.path.basename(basename) != basename:
        raise ValueError  # refuse 'dir%5Cbasename.ext' on Windows
    return basename

def download(url, dirpath, extensions=True, filename=None):
    """Download url to dirpath.
    Use basename of the url path as a filename.
    Create destination directory if necessary.
    Use `extensions` to require the file to have an extension, or any of
    a given sequence of extensions.
    Return (path, headers) on success.
    Don't retrieve url if path exists (headers are None in this case).
    """
    if not os.path.isdir(dirpath):
        os.makedirs(dirpath)
        logging.info('created directory %s', dirpath)
    # get filename from the url
    filename = url2filename(url) if filename is None else filename
    if os.path.basename(filename) != filename:
        logging.critical('filename must not have path separator in it "%s"',
                         filename)
        return
    if extensions:
        # require the file to have an extension
        root, ext = os.path.splitext(filename)
        if root and len(ext) > 1:
            # require the extension to be in the list
            try:
                it = iter(extensions)
            except TypeError:
                pass
            else:
                if ext not in it:
                    logging.warn(("file extension is not in the list"
                                  " url=%s"
                                  " extensions=%s"),
                                 url, extensions)
                    return
        else:
            logging.warn("file has no extension url=%s", url)
            return
    # download file
    path = os.path.join(dirpath, filename)
    logging.info("%sn%s", url, path)
    if os.path.exists(path):  # don't retrieve if path exists
        logging.info('path exists')
        return UrlRetreiveResult(path, None)
    try:
        return UrlRetreiveResult(*urllib.urlretrieve(url, path,
                                                     _print_download_status))
    except IOError:
        logging.warn('failed to download {url} -> {path}'.format(
            url=url, path=path))

def _print_download_status(block_count, block_size, total_size):
    logging.debug('%10s bytes of %s', block_count * block_size, total_size)

def min_time_between_calls(min_delay):
    """Enforce minimum time delay between calls."""
    def decorator(func):
        lastcall = [None]  # emulate nonlocal keyword
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if lastcall[0] is not None:
                delay = time.time() - lastcall[0]
                if delay < min_delay:
                    _sleep(min_delay - delay)
            lastcall[0] = time.time()
            return func(*args, **kwargs)
        return wrapper
    return decorator

@min_time_between_calls(5)
def _makesoup(url):
    try:
        logging.info(vars(url) if isinstance(url, urllib2.Request) else url)
        page = urllib2.urlopen(url)
        soup = BeautifulSoup(page.read(HTMLPAGE_SIZE_MAX))
        return soup
    except (IOError, OSError) as e:
        logging.warn('failed to return soup for %s, error: %s',
                     getattr(url, 'get_full_url', lambda: url)(), e)

class WallbaseImages:
    """Given parameters it provides image urls to download."""
    def __init__(self,
                 categories=None,  # default; sequence of CATEGORY_*
                 resolution_exactly=True,  # False means 'at least'
                 resolution=None,  # all; (width, height)
                 aspect_ratios=None,  # all; sequence eg, [(5,4),(16,9)]
                 purity=PURITY_DEFAULT,  # combine with |
                 thumbs_per_page=None,  # default; an integer
                 ):
        """See usage below."""
        self.categories = categories
        self.resolution_exactly = resolution_exactly
        self.resolution = resolution
        self.aspect_ratios = aspect_ratios
        self.purity = purity
        self.thumbs_per_page = thumbs_per_page
    def _as_request(self):
        """Create a urllib2.Request() using given parameters."""
        # make url
        if self.categories is not None:
            categories = "".join(str(n) for n in (2, 1, 3)
                                 if n in self.categories)
        else:  # default
            categories = "0"
        if self.resolution_exactly:
            at_least_or_exactly_resolution = "eqeq"
        else:
            at_least_or_exactly_resolution = "gteq"
        if self.resolution is not None:
            resolution = "{width:d}x{height:d}".format(
                width=self.resolution[0], height=self.resolution[1])
        else:
            resolution = "0x0"
        if self.aspect_ratios is not None:
            aspect_ratios = "+".join("%.2f" % (w / float(h),)
                                     for w, h in self.aspect_ratios)
        else:  # default
            aspect_ratios = "0"
        purity = "{0:03b}".format(self.purity)
        thumbs = 20 if self.thumbs_per_page is None else self.thumbs_per_page
        url = ("http://wallbase.cc/random/"
               "{categories}/"
               "{at_least_or_exactly_resolution}/{resolution}/"
               "{aspect_ratios}/"
               "{purity}/{thumbs:d}").format(**locals())
        logging.info(url)
        # make post data
        data = urllib.urlencode(dict(query='', board=categories, nsfw=purity,
                                     res=resolution,
                                     res_opt=at_least_or_exactly_resolution,
                                     aspect=aspect_ratios,
                                     thpp=thumbs))
        req = urllib2.Request(url, data)
        return req
    def __iter__(self):
        """Yield background image urls."""
        # find links to bigwall pages
        # css-like: #thumbs div[class="thumb"] 
        #      a[class~="thlink" and href^="http://"]
        soup = _makesoup(self._as_request())
        if not soup:
            logging.warn("can't retrieve the main page")
            return
        thumbs_soup = soup.find(id="thumbs")
        for thumb in thumbs_soup.find_all('div', {'class': "thumb"}):
            bigwall_a = thumb.find('a', {'class': "thlink",
                                         'href': re.compile(r"^http://")})
            if bigwall_a is None:
                logging.warn("can't find thlink link")
                continue  # try the next thumb
            # find image url on the bigwall page
            # css-like: #bigwall > img[alt and src^="http://"]
            bigwall_soup = _makesoup(bigwall_a['href'])
            if bigwall_soup is not None:
                bigwall = bigwall_soup.find(id='bigwall')
                if bigwall is not None:
                    img = bigwall.find('img',
                                       src=re.compile(r"(?i)^http://.*\.jpg$"),
                                       alt=True)
                    if img is not None:
                        url = img['src']
                        filename = url2filename(url)
                        if filename.lower().endswith('.jpg'):
                            yield url, filename  # successfully found image url
                        else:
                            logging.warn('suspicious url "%s"', url)
                        continue
            logging.warn("can't parse bigwall page")

def main():
    level = logging.INFO
    if '-d' in sys.argv:
        sys.argv.remove('-d')
        level = logging.DEBUG
    # configure logging
    logging.basicConfig(format='%(levelname)s: %(asctime)s %(message)s',
                        level=level, datefmt='%Y-%m-%d %H:%M:%S %Z')
    if len(sys.argv) > 1:
        backgrounds_dir = sys.argv[1]
    else:
        backgrounds_dir = DEFAULT_IMAGE_DIR
    # infinite loop: Press Ctrl+C to interrupt it
    #NOTE: here's some arbitrary logic: modify for your needs e.g., break
    # after the first image found
    timeout = TIMEOUT_MIN  # seconds
    for i in itertools.cycle(xrange(timeout, DAY_IN_SECONDS)):
        found = False
        try:
            for url, filename in WallbaseImages(
                    categories=[CATEGORY_WG, CATEGORY_HR, CATEGORY_W],
                    purity=PURITY_SFW,
                    thumbs_per_page=60):
                res = download(url, backgrounds_dir, extensions=('.jpg',),
                               filename=filename)
                if res and res.path:
                    found = True
                    set_background(res.path)
                # don't hammer the site
                timeout = max(TIMEOUT_MIN, i % DAY_IN_SECONDS)
                _sleep(random.randint(timeout, timeout + TIMEOUT_DELTA))
        except Exception:  # pylint: disable=W0703
            logging.exception('unexpected error')
            _sleep(timeout)
        else:
            if not found:
                logging.error('failed to retrieve any images')
                _sleep(timeout)
        timeout = (timeout * 2) % DAY_IN_SECONDS

def _sleep(timeout):
    """Add logging to time.sleep() call."""
    logging.debug('sleep for %s seconds', timeout)
    time.sleep(timeout)

main()

Tried implementing a Python script that uses the PIL library to write text on an image and then uses the Gio class to update the Gnome background "picture-uri" to point at that image. The script ping-pongs back and forth between two images so that it always modifies the one not currently in use, then tries to "switch" by updating the setting. This was done to avoid any flicker, since modifying the current background in place makes it disappear momentarily. When calling the script directly from a shell I rarely saw any problem, but from a cronjob it simply would not update on the pong. I used both sync and apply, and waited minutes before trying to switch images. No good. Tried running the cron command as the user (su -c "cmd" user), but that didn't work either.
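
Roughly, the ping-pong idea looked like the sketch below (reconstructed for illustration only; file names, coordinates, and the overlay text are placeholders, not the real script):

import os
from PIL import Image, ImageDraw
from gi.repository import Gio

IMAGES = [os.path.abspath("bg_a.png"), os.path.abspath("bg_b.png")]  # the two ping-pong files
SOURCE = os.path.abspath("source.png")  # pristine image the text is drawn onto

def switch_background(text, use_second):
    # draw onto whichever file is NOT currently shown, then switch to it
    target = IMAGES[1] if use_second else IMAGES[0]
    img = Image.open(SOURCE)
    ImageDraw.Draw(img).text((10, 10), text)
    img.save(target)
    settings = Gio.Settings.new("org.gnome.desktop.background")
    settings.set_string("picture-uri", "file://" + target)
    settings.apply()  # this switch is what never happened under cron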

I finally gave up on the ping-pong approach when I noticed that Gnome detects any change to the current background file and updates on its own. So I dropped the ping-pong approach and went to a temp file that I simply copy over the current background using the shutil library. Works like a charm.
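
The final approach boils down to something like this sketch (the temp file path is a placeholder):

import shutil
import urllib
from gi.repository import Gio

settings = Gio.Settings.new("org.gnome.desktop.background")
uri = settings.get_string("picture-uri")            # e.g. file:///home/user/.../bg.png
current_path = urllib.unquote(uri[len("file://"):])
# overwrite the file Gnome is already watching; it picks up the change by itself
shutil.copyfile("/tmp/new_background.png", current_path)  # placeholder temp file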
