Random Google Images - v1.0b

Published by erick Darko (last updated 15/01/2019)


Homepage: ...

Download 6937.random_google_images.py




Tired of hunting for a nice wallpaper for your desktop? With this script, written in Python 3, you can search for images on Google and, using the feh image viewer, set them as your wallpaper with almost no effort.
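
At its core the script just downloads the chosen image into /tmp and hands it to feh. A minimal sketch of that final step (assuming feh is installed; 'image.jpg' is only a placeholder for a real file):

# Minimal sketch: set the wallpaper with feh, the same call the script
# makes inside set_background() below ('image.jpg' is a placeholder path).
import subprocess
subprocess.run(['feh', '--bg-scale', 'image.jpg'], check=True)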

Dependencies:

feh     # duh !
bs4  - BeautifulSoup  

Installation on Debian-based distros:

$ sudo apt-get install feh python3-bs4 -y
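
To confirm both dependencies are in place before running it, a quick sanity check (sketch only) is:

# Sanity check (sketch): BeautifulSoup must import and feh must be on the PATH.
import shutil
from bs4 import BeautifulSoup   # raises ImportError if python3-bs4 is missing
print('feh found at:', shutil.which('feh'))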

Running the script:

$ python3 random_google_images.py wallpaper 4k -d 5

To see more options:

$ python3 random_google_images.py --help
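
Besides the search terms and -d, the argparse section of the script also defines --clear, -r/--recent and -v/--verbosity. For example:

$ python3 random_google_images.py --recent          # list recent cached searches
$ python3 random_google_images.py --clear           # wipe the search cache
$ python3 random_google_images.py wallpaper 4k -v   # print log messages while cycling wallpapers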

  



Source code:

#!/usr/bin/env python3.5
import sys

try:
   from bs4 import BeautifulSoup
except ImportError as err:
   # bs4 is needed to parse the Google results page; exit early if it is missing
   sys.exit(str(err))
from datetime import datetime
from urllib.request import Request, urlopen
import subprocess as sp
import argparse
import logging
import json
import os
import time

parser = argparse.ArgumentParser(description="Search images on Google and change your background with feh")
parser.add_argument('search', type=str, action='store', help="search for an image on Google", nargs='*')
parser.add_argument('--clear', action='store_true', help='clear the cache')
parser.add_argument('-r', '--recent', action='store_true', help='show recent searches')
parser.add_argument('-v', '--verbosity', action='store_true', help='show debugging information')
parser.add_argument('-d', action='store', type=int, help='delay in seconds between wallpapers (default: 3)', default=3)
args = parser.parse_args()
search = '+'.join(args.search)    # searching for "foo bar" becomes "foo+bar"

script_path = os.path.dirname(os.path.abspath(__file__)) # get the script absolute path
# logging config
LOG_FORMAT = "%(levelname)s %(asctime)s - %(message)s"
log_path = os.path.join(script_path, 'info.log')
logging.basicConfig(filename=log_path, level=logging.INFO, format=LOG_FORMAT, filemode='w')
logger = logging.getLogger()

class Cache:

   def __init__(self):
      """ create cache if not exists """
      self.search_cache_path = os.path.join(script_path, 'cache/search_cache.json')
      if not os.path.exists(script_path + '/cache'):
         os.mkdir(script_path + '/cache')
      if not os.path.exists(self.search_cache_path) or args.clear:
         open(self.search_cache_path, 'w').close()
      
      if args.recent:
         # show the recent searches kept in the cache
         recent_cache = self.load_cache()
         if recent_cache:
            print(' '.join(recent_cache))
         else:
            print('No recent cache.')

   def dump_cache(self, images):
      """ write the search results to the cache as plain JSON """
      search_cache = self.load_cache() or {}
      search_cache[search] = images
      with open(self.search_cache_path, 'w') as fp:
         json.dump(search_cache, fp)

   def load_cache(self):
      """ return the cache dict, or None if the cache is empty """
      if os.stat(self.search_cache_path).st_size > 0:
         with open(self.search_cache_path) as fp:
            return json.load(fp)

   def update_cache(self, image):
      """ remove a broken image from the cached results and persist the change """
      cache = self.load_cache()
      try:
         cache[search].remove(image)
         self.dump_cache(cache[search])
         return cache[search]
      except Exception as err:
         logger.error(err)

headers = {
   'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 '
                 '(KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.3'
}

def get_soup():
   """ return html """
   url = 'https://www.google.com/search?q={}&source=lnms&tbm=isch'.format(search)
   try:
      req = Request(url, headers=headers)
      with urlopen(req) as html:
         return BeautifulSoup(html.read(), 'html.parser')
   except Exception as err:
      logger.error(err)

def search_image():
   """ search the specified image on Google and return a list of results """
   soup = get_soup()
   if soup is None:
      sys.exit('could not fetch the Google results page (see info.log)')
   imgs = soup.find_all('div', {'class': 'rg_meta'})
   img_src = []
   for i in imgs:
      meta = json.loads(i.text)   # each rg_meta div carries a JSON blob of metadata
      link, ext = meta["ou"], meta["ity"]
      resolution = '{}x{}'.format(meta["ow"], meta["oh"])
      if meta["ow"] >= 800 and meta["oh"] >= 600:
         # keep only images whose resolution is at least 800x600
         img_src.append([link, ext, resolution])

   cache.dump_cache(img_src)
   return img_src

def set_background(image):
   """ download the image and set it as the background using feh """
   link, img_format, resolution = image
   # images are saved temporarily under /tmp/
   image_path = '/tmp/img_{}.{}'.format(datetime.now().strftime("%H%M_%y%m%d"), img_format)
   try:
      req = Request(link, headers=headers)
      with urlopen(req, timeout=15) as response:
         with open(image_path, 'wb') as f:
            f.write(response.read())
   except Exception as err:
      logger.error(err)
      return None

   try:
      sp.run(['feh', '--bg-scale', image_path], stdout=sp.DEVNULL, stderr=sp.DEVNULL, check=True)
   except sp.CalledProcessError as err:
      logger.error(err)
      return None

   time.sleep(args.d)   # keep the wallpaper on screen for the requested delay
   if os.path.exists(image_path):
      os.remove(image_path)
   return image_path

class Main():

   def __init__(self):
      search_cache = cache.load_cache()
      if search_cache and search in search_cache:
         print('searching in the cache... ' + search.replace('+', ' '))
         self.images = search_cache[search]
      else:
         print('searching on google... ' + search.replace('+', ' '))
         self.images = search_image()
      logger.info('cache length: {}'.format(len(self.images)))

   def run(self):
      try:
         i = last_line = 0
         current_image = None
         while True:
            image = self.images[i]
            current_image = set_background(image)
            if current_image is None:
               # the download or feh failed: drop this image from the cache
               cache.update_cache(image)
               self.images.remove(image)
               logger.info('{} removed'.format(image))
               logger.info('cache length: {}'.format(len(self.images)))
            i += 1
            if i >= len(self.images):
               i = 0
            if args.verbosity:
               # verbosity enabled: echo new log lines as they are written
               with open(log_path, 'r') as f:
                  f.seek(last_line)            # jump back to the last offset read
                  line = f.readline()
                  if line:                     # a new log entry is available
                     print(line.rstrip('\n'))
                     last_line = f.tell()      # remember where we stopped

      except KeyboardInterrupt:
         if current_image and os.path.exists(current_image):
            os.remove(current_image)
      except Exception as err:
         logger.error(err)

def check():
   assert sys.platform.startswith('linux'), "This code runs on Linux only."
   try:
      sp.run(['which', 'feh'], check=True, stdout=sp.DEVNULL)
   except (sp.CalledProcessError, FileNotFoundError):
      sys.exit("feh isn't installed")
   if args.d < 0:
      raise ValueError('sleep length must be non-negative')

if __name__ == '__main__':
   check()
   cache = Cache()
   if args.search:
      Main().run()
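
For reference, cache/search_cache.json is plain JSON mapping each search string to the list of [link, extension, resolution] entries collected by search_image(). Illustratively (the URL below is made up):

# Illustrative cache contents after searching for "wallpaper 4k" (placeholder URL):
{"wallpaper+4k": [["https://example.com/some-image.jpg", "jpg", "1920x1080"]]}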

