A possible solution:
from django.db import models, IntegrityError


class Bar(models.Model):
    # fields (attr1, attr3, ...) ...

    @classmethod
    def _validate_unique(cls, self):
        try:
            obj = cls._default_manager.get(attr1=self.attr1, attr3=self.attr3)
            if obj != self:
                raise IntegrityError('Duplicate')
        except cls.DoesNotExist:
            pass

    def clean(self):
        self._validate_unique(self)
        super(Bar, self).clean()
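A hedged usage sketch (the field values here are made up): the duplicate check runs whenever model validation is triggered, for example via full_clean(), which calls clean():

bar = Bar(attr1="spam", attr3="eggs")  # hypothetical field values
bar.full_clean()  # full_clean() calls clean(), which runs _validate_unique()
bar.save()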
Is there any more elegant way to add a value-sensitive unique-together constraint in a Django model?

For Django 2.2+ it is recommended to use UniqueConstraint. The docs include a note stating that unique_together may be deprecated in the future.
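A minimal sketch of what that can look like, reusing the attr1/attr3 field names from the snippet above; the conditional variant (unique attr1 only while attr3 is True) is an assumed example, not part of the original question:

from django.db import models
from django.db.models import Q, UniqueConstraint


class Bar(models.Model):
    attr1 = models.CharField(max_length=64)
    attr3 = models.BooleanField(default=False)

    class Meta:
        constraints = [
            # Plain replacement for unique_together:
            UniqueConstraint(fields=["attr1", "attr3"], name="unique_attr1_attr3"),
            # "Value sensitive" variant: enforce uniqueness of attr1
            # only for rows where attr3 is True (assumed condition).
            UniqueConstraint(
                fields=["attr1"],
                condition=Q(attr3=True),
                name="unique_attr1_when_attr3_true",
            ),
        ]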
I think a ManyToMany relation with a custom through table and a unique_together constraint on that table should do what you want.

Example code:

from django.db.models import Model, CASCADE, CharField, ForeignKey, ManyToManyField
from model_utils.models import TimeStampedModel  # third-party base class (django-model-utils)


class Option(Model):
    name = CharField(max_length=100)


class Thing(TimeStampedModel):
    options = ManyToManyField("Option", through="ThingOption")


class ThingOption(Model):
    thing = ForeignKey(Thing, on_delete=CASCADE)
    option = ForeignKey(Option, on_delete=CASCADE)
    value = CharField(max_length=100)

    class Meta:
        unique_together = ('thing', 'option')
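As a quick illustration (a sketch, not part of the original answer), a second ThingOption row for the same (thing, option) pair then violates the constraint:

thing = Thing.objects.create()
option = Option.objects.create(name="color")

ThingOption.objects.create(thing=thing, option=option, value="red")
# A second row for the same (thing, option) pair violates unique_together
# and raises django.db.IntegrityError at the database level:
ThingOption.objects.create(thing=thing, option=option, value="blue")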
You can override the manager's create() method and do something like:

from django.db import models


class MyModelManager(models.Manager):
    def create(self, **obj_data):
        # Do some extra stuff here on the submitted data before saving...
        # E.g. if obj_data["option_1"] == "eggs" and obj_data["option_2"] == "spam",
        # don't allow it, for whatever reason applies to your model.
        # Call the super method, which does the actual creation.
        return super().create(**obj_data)  # Python 3 syntax!!


class MyModel(models.Model):
    option_1 = models.CharField(max_length=64)
    option_2 = models.CharField(max_length=64)

    objects = MyModelManager()
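A hedged usage sketch with made-up values; calls go through the custom manager as usual:

obj = MyModel.objects.create(option_1="eggs", option_2="ham")  # runs the custom check before saving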
JavaScript inheritance and the prototype chain
const o = {
a: 1,
b: 2,
// __proto__ sets the [[Prototype]]. It's specified here
// as another object literal.
__proto__: {
b: 3,
c: 4,
},
};
// o.[[Prototype]] has properties b and c.
// o.[[Prototype]].[[Prototype]] is Object.prototype (we will explain
// what that means later).
// Finally, o.[[Prototype]].[[Prototype]].[[Prototype]] is null.
// This is the end of the prototype chain, as null,
// by definition, has no [[Prototype]].
// Thus, the full prototype chain looks like:
// { a: 1, b: 2 } ---> { b: 3, c: 4 } ---> Object.prototype ---> null
console.log(o.a); // 1
// Is there an 'a' own property on o? Yes, and its value is 1.
console.log(o.b); // 2
// Is there a 'b' own property on o? Yes, and its value is 2.
// The prototype also has a 'b' property, but it's not visited.
// This is called Property Shadowing
console.log(o.c); // 4
// Is there a 'c' own property on o? No, check its prototype.
// Is there a 'c' own property on o.[[Prototype]]? Yes, its value is 4.
console.log(o.d); // undefined
// Is there a 'd' own property on o? No, check its prototype.
// Is there a 'd' own property on o.[[Prototype]]? No, check its prototype.
// o.[[Prototype]].[[Prototype]] is Object.prototype and
// there is no 'd' property by default, check its prototype.
// o.[[Prototype]].[[Prototype]].[[Prototype]] is null, stop searching,
// no property found, return undefined.
const o = {
a: 1,
b: 2,
// __proto__ sets the [[Prototype]]. It's specified here
// as another object literal.
__proto__: {
b: 3,
c: 4,
__proto__: {
d: 5,
},
},
};
// { a: 1, b: 2 } ---> { b: 3, c: 4 } ---> { d: 5 } ---> Object.prototype ---> null
console.log(o.d); // 5
const parent = {
value: 2,
method() {
return this.value + 1;
}
};
console.log(parent.method()); // 3
// When calling parent.method in this case, 'this' refers to parent
// child is an object that inherits from parent
const child = {
__proto__: parent,
};
console.log(child.method()); // 3
// When child.method is called, 'this' refers to child.
// So when child inherits the method of parent,
// The property 'value' is sought on child. However, since child
// doesn't have an own property called 'value', the property is
// found on the [[Prototype]], which is parent.value.
child.value = 4; // assign the value 4 to the property 'value' on child.
// This shadows the 'value' property on parent.
// The child object now looks like:
// { value: 4, __proto__: { value: 2, method: [Function] } }
console.log(child.method()); // 5
// Since child now has the 'value' property, 'this.value' means
// child.value instead
Prototypes are also how shared behavior is factored out. Consider three objects that each carry their own copy of the same getValue() method:
const boxes = [{
value: 1,
getValue() {
return this.value;
}
},
{
value: 2,
getValue() {
return this.value;
}
},
{
value: 3,
getValue() {
return this.value;
}
},
];
Defining the same method on every object is wasteful. Instead, the shared method can be moved to a common prototype:
const boxPrototype = {
getValue() {
return this.value;
},
};
const boxes = [{
value: 1,
__proto__: boxPrototype
},
{
value: 2,
__proto__: boxPrototype
},
{
value: 3,
__proto__: boxPrototype
},
];
The same pattern is usually written with a constructor function; every object created with new Box() automatically gets Box.prototype as its [[Prototype]]:
// A constructor function
function Box(value) {
this.value = value;
}
// Properties all boxes created from the Box() constructor
// will have
Box.prototype.getValue = function() {
return this.value;
};
const boxes = [
new Box(1),
new Box(2),
new Box(3),
];
Note that in the case of identical date values, these methods (get_next_by_FOO() and get_previous_by_FOO()) will use the primary key as a tie-breaker. This guarantees that no records are skipped or duplicated. That also means you cannot use those methods on unsaved objects.

When you call refresh_from_db(), all non-deferred fields of the model are updated to the values currently present in the database.

If a model has an AutoField (an auto-incrementing primary key), then that auto-incremented value will be calculated and saved as an attribute on your object the first time you call save().
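A minimal sketch of that AutoField behavior, using a hypothetical Entry model:

from django.db import models


class Entry(models.Model):  # hypothetical model, just for illustration
    headline = models.CharField(max_length=100)


entry = Entry(headline="Hello")
print(entry.pk)  # None -- no primary key has been assigned yet
entry.save()
print(entry.pk)  # now holds the auto-incremented value assigned by the database

The snippets below show two documented ways to customize object creation: a classmethod on the model itself, and a create_book() method on a custom manager.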
from django.db import models


class Book(models.Model):
    title = models.CharField(max_length=100)

    @classmethod
    def create(cls, title):
        book = cls(title=title)
        # do something with the book
        return book


book = Book.create("Pride and Prejudice")

The same thing with a custom manager:

class BookManager(models.Manager):
    def create_book(self, title):
        book = self.create(title=title)
        # do something with the book
        return book


class Book(models.Model):
    title = models.CharField(max_length=100)

    objects = BookManager()


book = Book.objects.create_book("Pride and Prejudice")
Below is an example showing how to record the initial values of fields that are loaded from the database (the two methods are meant to live on a model class):

from django.db.models import DEFERRED


@classmethod
def from_db(cls, db, field_names, values):
    # Default implementation of from_db() (subject to change and could
    # be replaced with super()).
    if len(values) != len(cls._meta.concrete_fields):
        values = list(values)
        values.reverse()
        values = [
            values.pop() if f.attname in field_names else DEFERRED
            for f in cls._meta.concrete_fields
        ]
    instance = cls(*values)
    instance._state.adding = False
    instance._state.db = db
    # customization to store the original field values on the instance
    instance._loaded_values = dict(
        zip(field_names, (value for value in values if value is not DEFERRED))
    )
    return instance


def save(self, *args, **kwargs):
    # Check how the current values differ from ._loaded_values. For example,
    # prevent changing the creator_id of the model. (This example doesn't
    # support cases where 'creator_id' is deferred.)
    if not self._state.adding and (
        self.creator_id != self._loaded_values['creator_id']
    ):
        raise ValueError("Updating the value of creator isn't allowed")
    super().save(*args, **kwargs)
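A hedged usage sketch, assuming the two methods above are attached to a hypothetical Article model that has a creator foreign key:

article = Article.objects.get(pk=1)  # from_db() records _loaded_values
article.creator_id = 42              # pretend this differs from the loaded value
article.save()                       # the save() override raises ValueError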
>>> obj = MyModel.objects.first()
>>> del obj.field
>>> obj.field  # Loads the field from the database
from django.db.models import F


def test_update_result(self):
    obj = MyModel.objects.create(val=1)
    MyModel.objects.filter(pk=obj.pk).update(val=F('val') + 1)
    # At this point obj.val is still 1, but the value in the database
    # was updated to 2. The object's updated value needs to be reloaded
    # from the database.
    obj.refresh_from_db()
    self.assertEqual(obj.val, 2)
class ExampleModel(models.Model):
    def refresh_from_db(self, using=None, fields=None, **kwargs):
        # fields contains the name of the deferred field to be
        # loaded.
        if fields is not None:
            fields = set(fields)
            deferred_fields = self.get_deferred_fields()
            # If any deferred field is going to be loaded
            if fields.intersection(deferred_fields):
                # then load all of them
                fields = fields.union(deferred_fields)
        super().refresh_from_db(using, fields, **kwargs)
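A hedged usage sketch, with hypothetical field names, showing when the override kicks in:

# Assume ExampleModel has (hypothetical) text fields "body" and "summary".
obj = ExampleModel.objects.defer("body", "summary").get(pk=1)

# Accessing a deferred field loads it via refresh_from_db(fields=["body"]) behind
# the scenes; with the override above, "summary" is fetched in the same query.
print(obj.body)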