Skip to content

Commit 3752d09

Browse files
committed
[FIX] spreadsheet: batch process spreadsheet_revision.commands
Some databases have `spreadsheet_revision` records with over 10 million characters in `commands`. If the number of records is high, this leads to memory errors. We use a named cursor so the fetching is done in PostgreSQL and the data is processed in smaller batches by the upgrade process.
1 parent 86409e5 commit 3752d09

File tree

1 file changed

+7
-3
lines changed

1 file changed

+7
-3
lines changed

src/util/spreadsheet/misc.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,13 @@
1-
from .. import json
1+
from .. import json, pg
2+
3+
BATCH_SIZE = 10
24

35

46
def iter_commands(cr, like_all=(), like_any=()):
57
if not (bool(like_all) ^ bool(like_any)):
68
raise ValueError("Please specify `like_all` or `like_any`, not both")
7-
cr.execute(
9+
ncr = pg.named_cursor(cr, itersize=BATCH_SIZE)
10+
ncr.execute(
811
"""
912
SELECT id,
1013
commands
@@ -13,7 +16,7 @@ def iter_commands(cr, like_all=(), like_any=()):
1316
""".format("ALL" if like_all else "ANY"),
1417
[list(like_all or like_any)],
1518
)
16-
for revision_id, data in cr.fetchall():
19+
for revision_id, data in ncr:
1720
data_loaded = json.loads(data)
1821
if "commands" not in data_loaded:
1922
continue
@@ -27,6 +30,7 @@ def iter_commands(cr, like_all=(), like_any=()):
2730
cr.execute(
2831
"UPDATE spreadsheet_revision SET commands=%s WHERE id=%s", [json.dumps(data_loaded), revision_id]
2932
)
33+
ncr.close()
3034

3135

3236
def process_commands(cr, callback, *args, **kwargs):

0 commit comments

Comments (0)