Skip to content

Instantly share code, notes, and snippets.

@ecarreras
Last active December 15, 2015 02:48
Show Gist options
  • Select an option

  • Save ecarreras/5189334 to your computer and use it in GitHub Desktop.

Select an option

Save ecarreras/5189334 to your computer and use it in GitHub Desktop.

Parallelize your scripts with Redis

  1. Setup your redis server
  2. Install python-rq
  3. Clone this gist
  4. Start n workers
  5. Run tas_rq.py
  6. Wait for the workers to do the job :)
from __future__ import division
from datetime import datetime
def execute(dbname, uid, obj, method, *args, **kw):
    """Bootstrap an OpenERP server environment and run one ORM call.

    Designed to run inside an rq worker: each job imports the OpenERP
    server modules, opens the pool for *dbname*, executes
    ``obj.method(*args, **kw)`` as user *uid*, closes the DB and returns
    the result.

    :param dbname: database to open (also written into tools.config)
    :param uid: OpenERP user id the call runs as
    :param obj: ORM model name, e.g. ``'account.move'``
    :param method: model method name, e.g. ``'search'``
    :return: whatever ``osv_pool.execute`` returns for that call
    """
    start = datetime.now()
    # Silence every logger while the OpenERP modules initialise;
    # their import-time logging is noisy inside a worker.
    import logging
    logging.disable(logging.CRITICAL)
    import netsvc
    import tools
    import pooler
    tools.config['db_name'] = dbname
    # NOTE(review): hard-coded deployment path — adjust per machine.
    tools.config['addons_path'] = '/home/erpdev/src/erp/server/bin/addons'
    import osv
    import workflow
    import report
    import service
    import sql_db
    osv_ = osv.osv.osv_pool()
    # Loads (or reuses) the registry/pool for this database.
    pooler.get_db_and_pool(dbname)
    # Re-enable logging and start from a clean handler configuration.
    logging.disable(0)
    logger = logging.getLogger()
    logger.handlers = []
    # BUG FIX: basicConfig() accepts keyword arguments only; the
    # original positional call basicConfig(logging.INFO) raised
    # TypeError.
    logging.basicConfig(level=logging.INFO)
    res = osv_.execute(dbname, uid, obj, method, *args, **kw)
    logger.info('Time elapsed: %s' % (datetime.now() - start))
    sql_db.close_db(dbname)
    return res
def make_chunks(ids, n_chunks=None, size=None):
    """Split *ids* into consecutive chunks.

    Exactly one of *n_chunks* (number of chunks wanted) or *size*
    (length of each chunk) must be given.

    :param ids: sequence to split (order preserved)
    :param n_chunks: desired number of chunks
    :param size: desired length of each chunk (last may be shorter)
    :return: list of slices of ``ids``
    :raises ValueError: if neither or both of n_chunks/size are passed
    """
    if not n_chunks and not size:
        raise ValueError("n_chunks or size must be passed")
    if n_chunks and size:
        raise ValueError("only n_chunks or size must be passed")
    # Robustness: avoid computing size 0 (range step 0) on empty input.
    if not ids:
        return []
    if not size:
        # Ceiling division without math.ceil — the original called
        # ceil() but never imported it, raising NameError on this path.
        size = (len(ids) + n_chunks - 1) // n_chunks
    return [ids[i:i + size] for i in range(0, len(ids), size)]
# Driver script: fan out validation of draft account moves to rq workers.
#
# Usage: python tas_rq.py <dbname>   (redis must be running on localhost)
from rq import Connection, Queue
from redis import Redis, from_url
import time
import sys

from task import execute, make_chunks

redis_conn = from_url('redis://localhost:6379')
q = Queue(connection=redis_conn)

dbname = sys.argv[1]

# First job: search all draft account moves on a worker.
job = q.enqueue(execute, dbname, 1, 'account.move', 'search',
                [('state', '=', 'draft')])

# BUG FIX: rq's job.result is None until the job finishes. The original
# polled `while not res`, which spins forever when the search
# legitimately returns an empty list ([] is falsy too). Poll for
# "no result yet" (None) explicitly.
# NOTE(review): a *failed* job also keeps result None — consider
# checking job.is_failed to avoid waiting forever on a crash.
while job.result is None:
    time.sleep(0.1)
res = job.result

# Fan out validation, 50 ids per job.
chunks = make_chunks(res, size=50)
for j, chunk in enumerate(chunks):
    q.enqueue(execute, dbname, 1, 'account.move', 'button_validate', chunk)
    print("Enqueued job %i/%i" % (j + 1, len(chunks)))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment