Commit 1c4ebcf2 authored by Alexander Pace

testing auto_increment changes

parent f7d371a5
Pipeline #211989 passed with stages
in 16 minutes and 28 seconds
@@ -19,8 +19,7 @@ workers = multiprocessing.cpu_count()*2 + 1
# Worker class.
#
worker_class = 'sync'
#threads = 2
worker_class = 'gevent'
# Adding options for timeout. If not specified, the timeout
# defaults to 30 seconds. Source:
......
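For context, this is roughly what the relevant gunicorn settings look like once the gevent worker and an explicit timeout are in place; the concrete timeout value below is an illustrative assumption, not taken from this commit.

```python
# Sketch of a gunicorn config module (illustrative values, not from this commit).
import multiprocessing

workers = multiprocessing.cpu_count() * 2 + 1
worker_class = 'gevent'   # switched from the default 'sync' worker

# gunicorn's default timeout is 30 seconds when it is not set explicitly;
# spelling it out makes the chosen value visible in the config.
timeout = 30
```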
from collections import OrderedDict
import logging
import re
from django.db import models, connection, IntegrityError
from django.db.models import Q, Max
from django.utils import six
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.db.models import QuerySet
from django.forms.models import model_to_dict
from django.utils.translation import ugettext_lazy as _
from core.vfile import VersionedFile
# Testing waiting:
from time import sleep
from random import random
# Set up user model
UserModel = get_user_model()
@@ -77,6 +80,7 @@ class AutoIncrementModel(models.Model):
if not isinstance(self.AUTO_CONSTRAINTS, (tuple, list)):
    raise TypeError(_('AUTO_CONSTRAINTS should be a tuple or list'))
# Get some useful information
meta = self.__class__._meta
current_class = self.__class__
@@ -109,37 +113,38 @@ class AutoIncrementModel(models.Model):
for const in self.AUTO_CONSTRAINTS:
    const_query = const_query & Q(**{const: getattr(self, const)})
# get the queryset that meets the constraints:
qs = current_class.objects.filter(const_query)
# Set AUTO_FIELD to one plus the maximum value of AUTO_FIELD in the
# constrained set. Note that because some superevents get removed (among
# other circumstances), the max does not always equal the number of
# entries in the set. This caused some db integrity errors in testing.
if qs:
    setattr(self, self.AUTO_FIELD,
            qs.aggregate(max_val=Max(self.AUTO_FIELD))['max_val'] + 1)
else:
    setattr(self, self.AUTO_FIELD, 1)
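A toy illustration (invented numbers, not from the repository) of why the maximum, rather than the row count, is the safe basis for the next value:

```python
# Suppose superevent numbers 1, 2, 3 and 5 exist because number 4 was removed.
existing = [1, 2, 3, 5]

next_by_count = len(existing) + 1   # 5 -> collides with an existing number
next_by_max = max(existing) + 1     # 6 -> safe

assert next_by_count in existing
assert next_by_max not in existing
```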
# Save the object and check constraints. Add the ability to retry in the
# event of DB integrity errors.
this_try = 0
while this_try < self.max_retries:
number_of_tries = 0
success = False
while not success:
    try:
        self.full_clean()
    except:
        logger.warning("Database integrity error when saving {}. "
                       "Incrementing and retrying.".format(self))
        setattr(self, self.AUTO_FIELD,
                getattr(self, self.AUTO_FIELD) + 1)
        this_try += 1
    else:
        setattr(self, self.AUTO_FIELD,
                self.updated_autofield_value(current_class, const_query))
        super(AutoIncrementModel, self).save(*args, **kwargs)
        break
    except (IntegrityError, ValidationError):
        logger.warning("Sleeping to stabilize database. try={}, "
                       "object={}".format(number_of_tries, self))
        sleep(random())
        number_of_tries += 1
        if number_of_tries > 3:
            raise
    else:
        success = True
def updated_autofield_value(self, current_class, query):
    # get the queryset:
    query_set = current_class.objects.filter(query)
    if query_set:
        return query_set.aggregate(
            max_val=Max(self.AUTO_FIELD))['max_val'] + 1
    else:
        return 1
def auto_increment_update(self, update_field_name, constraints=[],
                          allow_update_to_nonnull=False):
......
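Because the hunk above interleaves the removed and the added lines, the retry logic can be hard to read straight off the diff. Below is a minimal, self-contained sketch of the pattern the new code follows; the helper name, its parameters, and the default of four attempts are assumptions for illustration, not the repository's exact API.

```python
import logging
from random import random
from time import sleep

logger = logging.getLogger(__name__)


def retry_with_jitter(action, retriable=(Exception,), max_tries=4):
    """Sketch (hypothetical helper): run action(); on a retriable error,
    sleep for a random fraction of a second and try again, re-raising
    once max_tries attempts have failed."""
    number_of_tries = 0
    while True:
        try:
            return action()
        except retriable:
            number_of_tries += 1
            logger.warning("Sleeping to stabilize database. try=%s",
                           number_of_tries)
            if number_of_tries >= max_tries:
                raise
            sleep(random())
```

In the model itself, the action corresponds to recomputing AUTO_FIELD via updated_autofield_value(), running full_clean(), and calling the parent save(), with (IntegrityError, ValidationError) as the retriable exceptions.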
@@ -13,6 +13,7 @@ djangorestframework==3.12.2
djangorestframework-guardian==0.3.0
dnspython==1.15.0
flake8==3.8.0
gevent==21.1.2
gunicorn==20.0.4
html5lib==1.1.0
ipdb==0.13.6
......
  • okay so note to self: https://github.com/gevent/gevent/issues/1268

    gevent is fast and stable when lvalerts are turned off, but it fails pretty epically when they're turned on, due to an incompatibility with Python's multiprocessing module. This might be worth revisiting when the lvalert client and lvalert_overseer get rewritten (soon).
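    One hypothetical way to keep that rollback cheap while the issue is open (a sketch, not something this commit or the repo does) is to pick the worker class from the environment, so 'gevent' can be switched off without editing the config:

    ```python
    # Hypothetical gunicorn config fragment: default to the safe 'sync' worker and
    # opt in to 'gevent' only when lvalert listeners are known to be disabled.
    import os

    worker_class = os.environ.get('GUNICORN_WORKER_CLASS', 'sync')
    ```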
