before send to remote
6  env/lib/python3.8/site-packages/django/db/models/sql/__init__.py  vendored  Normal file
@@ -0,0 +1,6 @@
from django.db.models.sql.query import *  # NOQA
from django.db.models.sql.query import Query
from django.db.models.sql.subqueries import *  # NOQA
from django.db.models.sql.where import AND, OR, XOR

__all__ = ["Query", "AND", "OR", "XOR"]
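A quick usage sketch of the names this package re-exports (assumes a Django 4.x installation; nothing below is added by this commit):

from django.db.models.sql import AND, OR, XOR, Query

assert (AND, OR, XOR) == ("AND", "OR", "XOR")
assert Query.__module__ == "django.db.models.sql.query"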
BIN  env/lib/python3.8/site-packages/django/db/models/sql/__pycache__/__init__.cpython-38.pyc  vendored  Normal file
Binary file not shown.
BIN  env/lib/python3.8/site-packages/django/db/models/sql/__pycache__/compiler.cpython-38.pyc  vendored  Normal file
Binary file not shown.
BIN  env/lib/python3.8/site-packages/django/db/models/sql/__pycache__/constants.cpython-38.pyc  vendored  Normal file
Binary file not shown.
BIN  env/lib/python3.8/site-packages/django/db/models/sql/__pycache__/datastructures.cpython-38.pyc  vendored  Normal file
Binary file not shown.
BIN  env/lib/python3.8/site-packages/django/db/models/sql/__pycache__/query.cpython-38.pyc  vendored  Normal file
Binary file not shown.
BIN  env/lib/python3.8/site-packages/django/db/models/sql/__pycache__/subqueries.cpython-38.pyc  vendored  Normal file
Binary file not shown.
BIN  env/lib/python3.8/site-packages/django/db/models/sql/__pycache__/where.cpython-38.pyc  vendored  Normal file
Binary file not shown.
1931  env/lib/python3.8/site-packages/django/db/models/sql/compiler.py  vendored  Normal file
File diff suppressed because it is too large
24  env/lib/python3.8/site-packages/django/db/models/sql/constants.py  vendored  Normal file
@@ -0,0 +1,24 @@
"""
Constants specific to the SQL storage portion of the ORM.
"""

# Size of each "chunk" for get_iterator calls.
# Larger values are slightly faster at the expense of more storage space.
GET_ITERATOR_CHUNK_SIZE = 100

# Namedtuples for sql.* internal use.

# How many results to expect from a cursor.execute call
MULTI = "multi"
SINGLE = "single"
CURSOR = "cursor"
NO_RESULTS = "no results"

ORDER_DIR = {
    "ASC": ("ASC", "DESC"),
    "DESC": ("DESC", "ASC"),
}

# SQL join types.
INNER = "INNER JOIN"
LOUTER = "LEFT OUTER JOIN"
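As a quick illustration of how ORDER_DIR is meant to be read (the resolve_direction helper below is hypothetical, not part of Django): each key maps a default direction to the pair (same direction, reversed direction), so reversing an ordering is a dictionary lookup plus an index.

ORDER_DIR = {
    "ASC": ("ASC", "DESC"),
    "DESC": ("DESC", "ASC"),
}


def resolve_direction(default, descending=False):
    # Pick the stated direction, or its reverse when descending is requested.
    return ORDER_DIR[default][1 if descending else 0]


assert resolve_direction("ASC") == "ASC"
assert resolve_direction("ASC", descending=True) == "DESC"
assert resolve_direction("DESC", descending=True) == "ASC"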
220  env/lib/python3.8/site-packages/django/db/models/sql/datastructures.py  vendored  Normal file
@@ -0,0 +1,220 @@
"""
Useful auxiliary data structures for query construction. Not useful outside
the SQL domain.
"""
from django.db.models.sql.constants import INNER, LOUTER


class MultiJoin(Exception):
    """
    Used by join construction code to indicate the point at which a
    multi-valued join was attempted (if the caller wants to treat that
    exceptionally).
    """

    def __init__(self, names_pos, path_with_names):
        self.level = names_pos
        # The path travelled, this includes the path to the multijoin.
        self.names_with_path = path_with_names


class Empty:
    pass


class Join:
    """
    Used by sql.Query and sql.SQLCompiler to generate JOIN clauses into the
    FROM entry. For example, the SQL generated could be
        LEFT OUTER JOIN "sometable" T1
        ON ("othertable"."sometable_id" = "sometable"."id")

    This class is primarily used in Query.alias_map. All entries in alias_map
    must be Join compatible by providing the following attributes and methods:
        - table_name (string)
        - table_alias (possible alias for the table, can be None)
        - join_type (can be None for those entries that aren't joined from
          anything)
        - parent_alias (which table is this join's parent, can be None similarly
          to join_type)
        - as_sql()
        - relabeled_clone()
    """

    def __init__(
        self,
        table_name,
        parent_alias,
        table_alias,
        join_type,
        join_field,
        nullable,
        filtered_relation=None,
    ):
        # Join table
        self.table_name = table_name
        self.parent_alias = parent_alias
        # Note: table_alias is not necessarily known at instantiation time.
        self.table_alias = table_alias
        # LOUTER or INNER
        self.join_type = join_type
        # A list of 2-tuples to use in the ON clause of the JOIN.
        # Each 2-tuple will create one join condition in the ON clause.
        self.join_cols = join_field.get_joining_columns()
        # Along which field (or ForeignObjectRel in the reverse join case)
        self.join_field = join_field
        # Is this join nullabled?
        self.nullable = nullable
        self.filtered_relation = filtered_relation

    def as_sql(self, compiler, connection):
        """
        Generate the full
            LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params
        clause for this join.
        """
        join_conditions = []
        params = []
        qn = compiler.quote_name_unless_alias
        qn2 = connection.ops.quote_name

        # Add a join condition for each pair of joining columns.
        for lhs_col, rhs_col in self.join_cols:
            join_conditions.append(
                "%s.%s = %s.%s"
                % (
                    qn(self.parent_alias),
                    qn2(lhs_col),
                    qn(self.table_alias),
                    qn2(rhs_col),
                )
            )

        # Add a single condition inside parentheses for whatever
        # get_extra_restriction() returns.
        extra_cond = self.join_field.get_extra_restriction(
            self.table_alias, self.parent_alias
        )
        if extra_cond:
            extra_sql, extra_params = compiler.compile(extra_cond)
            join_conditions.append("(%s)" % extra_sql)
            params.extend(extra_params)
        if self.filtered_relation:
            extra_sql, extra_params = compiler.compile(self.filtered_relation)
            if extra_sql:
                join_conditions.append("(%s)" % extra_sql)
                params.extend(extra_params)
        if not join_conditions:
            # This might be a rel on the other end of an actual declared field.
            declared_field = getattr(self.join_field, "field", self.join_field)
            raise ValueError(
                "Join generated an empty ON clause. %s did not yield either "
                "joining columns or extra restrictions." % declared_field.__class__
            )
        on_clause_sql = " AND ".join(join_conditions)
        alias_str = (
            "" if self.table_alias == self.table_name else (" %s" % self.table_alias)
        )
        sql = "%s %s%s ON (%s)" % (
            self.join_type,
            qn(self.table_name),
            alias_str,
            on_clause_sql,
        )
        return sql, params

    def relabeled_clone(self, change_map):
        new_parent_alias = change_map.get(self.parent_alias, self.parent_alias)
        new_table_alias = change_map.get(self.table_alias, self.table_alias)
        if self.filtered_relation is not None:
            filtered_relation = self.filtered_relation.clone()
            filtered_relation.path = [
                change_map.get(p, p) for p in self.filtered_relation.path
            ]
        else:
            filtered_relation = None
        return self.__class__(
            self.table_name,
            new_parent_alias,
            new_table_alias,
            self.join_type,
            self.join_field,
            self.nullable,
            filtered_relation=filtered_relation,
        )

    @property
    def identity(self):
        return (
            self.__class__,
            self.table_name,
            self.parent_alias,
            self.join_field,
            self.filtered_relation,
        )

    def __eq__(self, other):
        if not isinstance(other, Join):
            return NotImplemented
        return self.identity == other.identity

    def __hash__(self):
        return hash(self.identity)

    def equals(self, other):
        # Ignore filtered_relation in equality check.
        return self.identity[:-1] == other.identity[:-1]

    def demote(self):
        new = self.relabeled_clone({})
        new.join_type = INNER
        return new

    def promote(self):
        new = self.relabeled_clone({})
        new.join_type = LOUTER
        return new


class BaseTable:
    """
    The BaseTable class is used for base table references in FROM clause. For
    example, the SQL "foo" in
        SELECT * FROM "foo" WHERE somecond
    could be generated by this class.
    """

    join_type = None
    parent_alias = None
    filtered_relation = None

    def __init__(self, table_name, alias):
        self.table_name = table_name
        self.table_alias = alias

    def as_sql(self, compiler, connection):
        alias_str = (
            "" if self.table_alias == self.table_name else (" %s" % self.table_alias)
        )
        base_sql = compiler.quote_name_unless_alias(self.table_name)
        return base_sql + alias_str, []

    def relabeled_clone(self, change_map):
        return self.__class__(
            self.table_name, change_map.get(self.table_alias, self.table_alias)
        )

    @property
    def identity(self):
        return self.__class__, self.table_name, self.table_alias

    def __eq__(self, other):
        if not isinstance(other, BaseTable):
            return NotImplemented
        return self.identity == other.identity

    def __hash__(self):
        return hash(self.identity)

    def equals(self, other):
        return self.identity == other.identity
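For orientation, here is a minimal, framework-free sketch of how Join.as_sql() assembles its ON clause. The join_sql helper and the naive qn() quoting below are illustrative stand-ins (the real code uses compiler.quote_name_unless_alias and connection.ops.quote_name) and are not part of this commit.

def qn(name):
    # Stand-in for the compiler/connection quoting functions.
    return '"%s"' % name


def join_sql(join_type, table_name, table_alias, parent_alias, join_cols):
    # Mirror of the string assembly in Join.as_sql(): one equality per
    # column pair, joined with AND, then the JOIN header and alias.
    conditions = [
        "%s.%s = %s.%s" % (qn(parent_alias), qn(lhs), qn(table_alias), qn(rhs))
        for lhs, rhs in join_cols
    ]
    alias_str = "" if table_alias == table_name else " %s" % table_alias
    return "%s %s%s ON (%s)" % (
        join_type,
        qn(table_name),
        alias_str,
        " AND ".join(conditions),
    )


print(join_sql("LEFT OUTER JOIN", "sometable", "T1", "othertable", [("sometable_id", "id")]))
# LEFT OUTER JOIN "sometable" T1 ON ("othertable"."sometable_id" = "T1"."id")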
2691  env/lib/python3.8/site-packages/django/db/models/sql/query.py  vendored  Normal file
File diff suppressed because it is too large
171  env/lib/python3.8/site-packages/django/db/models/sql/subqueries.py  vendored  Normal file
@@ -0,0 +1,171 @@
"""
Query subclasses which provide extra functionality beyond simple data retrieval.
"""

from django.core.exceptions import FieldError
from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE, NO_RESULTS
from django.db.models.sql.query import Query

__all__ = ["DeleteQuery", "UpdateQuery", "InsertQuery", "AggregateQuery"]


class DeleteQuery(Query):
    """A DELETE SQL query."""

    compiler = "SQLDeleteCompiler"

    def do_query(self, table, where, using):
        self.alias_map = {table: self.alias_map[table]}
        self.where = where
        cursor = self.get_compiler(using).execute_sql(CURSOR)
        if cursor:
            with cursor:
                return cursor.rowcount
        return 0

    def delete_batch(self, pk_list, using):
        """
        Set up and execute delete queries for all the objects in pk_list.

        More than one physical query may be executed if there are a
        lot of values in pk_list.
        """
        # number of objects deleted
        num_deleted = 0
        field = self.get_meta().pk
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            self.clear_where()
            self.add_filter(
                f"{field.attname}__in",
                pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE],
            )
            num_deleted += self.do_query(
                self.get_meta().db_table, self.where, using=using
            )
        return num_deleted


class UpdateQuery(Query):
    """An UPDATE SQL query."""

    compiler = "SQLUpdateCompiler"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._setup_query()

    def _setup_query(self):
        """
        Run on initialization and at the end of chaining. Any attributes that
        would normally be set in __init__() should go here instead.
        """
        self.values = []
        self.related_ids = None
        self.related_updates = {}

    def clone(self):
        obj = super().clone()
        obj.related_updates = self.related_updates.copy()
        return obj

    def update_batch(self, pk_list, values, using):
        self.add_update_values(values)
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            self.clear_where()
            self.add_filter(
                "pk__in", pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]
            )
            self.get_compiler(using).execute_sql(NO_RESULTS)

    def add_update_values(self, values):
        """
        Convert a dictionary of field name to value mappings into an update
        query. This is the entry point for the public update() method on
        querysets.
        """
        values_seq = []
        for name, val in values.items():
            field = self.get_meta().get_field(name)
            direct = (
                not (field.auto_created and not field.concrete) or not field.concrete
            )
            model = field.model._meta.concrete_model
            if not direct or (field.is_relation and field.many_to_many):
                raise FieldError(
                    "Cannot update model field %r (only non-relations and "
                    "foreign keys permitted)." % field
                )
            if model is not self.get_meta().concrete_model:
                self.add_related_update(model, field, val)
                continue
            values_seq.append((field, model, val))
        return self.add_update_fields(values_seq)

    def add_update_fields(self, values_seq):
        """
        Append a sequence of (field, model, value) triples to the internal list
        that will be used to generate the UPDATE query. Might be more usefully
        called add_update_targets() to hint at the extra information here.
        """
        for field, model, val in values_seq:
            if hasattr(val, "resolve_expression"):
                # Resolve expressions here so that annotations are no longer needed
                val = val.resolve_expression(self, allow_joins=False, for_save=True)
            self.values.append((field, model, val))

    def add_related_update(self, model, field, value):
        """
        Add (name, value) to an update query for an ancestor model.

        Update are coalesced so that only one update query per ancestor is run.
        """
        self.related_updates.setdefault(model, []).append((field, None, value))

    def get_related_updates(self):
        """
        Return a list of query objects: one for each update required to an
        ancestor model. Each query will have the same filtering conditions as
        the current query but will only update a single table.
        """
        if not self.related_updates:
            return []
        result = []
        for model, values in self.related_updates.items():
            query = UpdateQuery(model)
            query.values = values
            if self.related_ids is not None:
                query.add_filter("pk__in", self.related_ids[model])
            result.append(query)
        return result


class InsertQuery(Query):
    compiler = "SQLInsertCompiler"

    def __init__(
        self, *args, on_conflict=None, update_fields=None, unique_fields=None, **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.fields = []
        self.objs = []
        self.on_conflict = on_conflict
        self.update_fields = update_fields or []
        self.unique_fields = unique_fields or []

    def insert_values(self, fields, objs, raw=False):
        self.fields = fields
        self.objs = objs
        self.raw = raw


class AggregateQuery(Query):
    """
    Take another query as a parameter to the FROM clause and only select the
    elements in the provided list.
    """

    compiler = "SQLAggregateCompiler"

    def __init__(self, model, inner_query):
        self.inner_query = inner_query
        super().__init__(model)
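A small, framework-free sketch of the chunking pattern that delete_batch() and update_batch() rely on; the chunked() helper is illustrative, not part of this commit. Each slice of at most GET_ITERATOR_CHUNK_SIZE primary keys becomes one pk__in filter and one physical query.

GET_ITERATOR_CHUNK_SIZE = 100


def chunked(pk_list, size=GET_ITERATOR_CHUNK_SIZE):
    # Yield consecutive slices of at most `size` primary keys.
    for offset in range(0, len(pk_list), size):
        yield pk_list[offset : offset + size]


# 250 ids become three batches: 100, 100 and 50 values per pk__in filter.
assert [len(chunk) for chunk in chunked(list(range(250)))] == [100, 100, 50]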
300  env/lib/python3.8/site-packages/django/db/models/sql/where.py  vendored  Normal file
@@ -0,0 +1,300 @@
"""
Code to manage the creation and SQL rendering of 'where' constraints.
"""
import operator
from functools import reduce

from django.core.exceptions import EmptyResultSet
from django.db.models.expressions import Case, When
from django.db.models.lookups import Exact
from django.utils import tree
from django.utils.functional import cached_property

# Connection types
AND = "AND"
OR = "OR"
XOR = "XOR"


class WhereNode(tree.Node):
    """
    An SQL WHERE clause.

    The class is tied to the Query class that created it (in order to create
    the correct SQL).

    A child is usually an expression producing boolean values. Most likely the
    expression is a Lookup instance.

    However, a child could also be any class with as_sql() and either
    relabeled_clone() method or relabel_aliases() and clone() methods and
    contains_aggregate attribute.
    """

    default = AND
    resolved = False
    conditional = True

    def split_having(self, negated=False):
        """
        Return two possibly None nodes: one for those parts of self that
        should be included in the WHERE clause and one for those parts of
        self that must be included in the HAVING clause.
        """
        if not self.contains_aggregate:
            return self, None
        in_negated = negated ^ self.negated
        # If the effective connector is OR or XOR and this node contains an
        # aggregate, then we need to push the whole branch to HAVING clause.
        may_need_split = (
            (in_negated and self.connector == AND)
            or (not in_negated and self.connector == OR)
            or self.connector == XOR
        )
        if may_need_split and self.contains_aggregate:
            return None, self
        where_parts = []
        having_parts = []
        for c in self.children:
            if hasattr(c, "split_having"):
                where_part, having_part = c.split_having(in_negated)
                if where_part is not None:
                    where_parts.append(where_part)
                if having_part is not None:
                    having_parts.append(having_part)
            elif c.contains_aggregate:
                having_parts.append(c)
            else:
                where_parts.append(c)
        having_node = (
            self.__class__(having_parts, self.connector, self.negated)
            if having_parts
            else None
        )
        where_node = (
            self.__class__(where_parts, self.connector, self.negated)
            if where_parts
            else None
        )
        return where_node, having_node

    def as_sql(self, compiler, connection):
        """
        Return the SQL version of the where clause and the value to be
        substituted in. Return '', [] if this node matches everything,
        None, [] if this node is empty, and raise EmptyResultSet if this
        node can't match anything.
        """
        result = []
        result_params = []
        if self.connector == AND:
            full_needed, empty_needed = len(self.children), 1
        else:
            full_needed, empty_needed = 1, len(self.children)

        if self.connector == XOR and not connection.features.supports_logical_xor:
            # Convert if the database doesn't support XOR:
            #   a XOR b XOR c XOR ...
            # to:
            #   (a OR b OR c OR ...) AND (a + b + c + ...) == 1
            lhs = self.__class__(self.children, OR)
            rhs_sum = reduce(
                operator.add,
                (Case(When(c, then=1), default=0) for c in self.children),
            )
            rhs = Exact(1, rhs_sum)
            return self.__class__([lhs, rhs], AND, self.negated).as_sql(
                compiler, connection
            )

        for child in self.children:
            try:
                sql, params = compiler.compile(child)
            except EmptyResultSet:
                empty_needed -= 1
            else:
                if sql:
                    result.append(sql)
                    result_params.extend(params)
                else:
                    full_needed -= 1
            # Check if this node matches nothing or everything.
            # First check the amount of full nodes and empty nodes
            # to make this node empty/full.
            # Now, check if this node is full/empty using the
            # counts.
            if empty_needed == 0:
                if self.negated:
                    return "", []
                else:
                    raise EmptyResultSet
            if full_needed == 0:
                if self.negated:
                    raise EmptyResultSet
                else:
                    return "", []
        conn = " %s " % self.connector
        sql_string = conn.join(result)
        if sql_string:
            if self.negated:
                # Some backends (Oracle at least) need parentheses
                # around the inner SQL in the negated case, even if the
                # inner SQL contains just a single expression.
                sql_string = "NOT (%s)" % sql_string
            elif len(result) > 1 or self.resolved:
                sql_string = "(%s)" % sql_string
        return sql_string, result_params

    def get_group_by_cols(self, alias=None):
        cols = []
        for child in self.children:
            cols.extend(child.get_group_by_cols())
        return cols

    def get_source_expressions(self):
        return self.children[:]

    def set_source_expressions(self, children):
        assert len(children) == len(self.children)
        self.children = children

    def relabel_aliases(self, change_map):
        """
        Relabel the alias values of any children. 'change_map' is a dictionary
        mapping old (current) alias values to the new values.
        """
        for pos, child in enumerate(self.children):
            if hasattr(child, "relabel_aliases"):
                # For example another WhereNode
                child.relabel_aliases(change_map)
            elif hasattr(child, "relabeled_clone"):
                self.children[pos] = child.relabeled_clone(change_map)

    def clone(self):
        """
        Create a clone of the tree. Must only be called on root nodes (nodes
        with empty subtree_parents). Childs must be either (Constraint, lookup,
        value) tuples, or objects supporting .clone().
        """
        clone = self.__class__._new_instance(
            children=None,
            connector=self.connector,
            negated=self.negated,
        )
        for child in self.children:
            if hasattr(child, "clone"):
                clone.children.append(child.clone())
            else:
                clone.children.append(child)
        return clone

    def relabeled_clone(self, change_map):
        clone = self.clone()
        clone.relabel_aliases(change_map)
        return clone

    def copy(self):
        return self.clone()

    @classmethod
    def _contains_aggregate(cls, obj):
        if isinstance(obj, tree.Node):
            return any(cls._contains_aggregate(c) for c in obj.children)
        return obj.contains_aggregate

    @cached_property
    def contains_aggregate(self):
        return self._contains_aggregate(self)

    @classmethod
    def _contains_over_clause(cls, obj):
        if isinstance(obj, tree.Node):
            return any(cls._contains_over_clause(c) for c in obj.children)
        return obj.contains_over_clause

    @cached_property
    def contains_over_clause(self):
        return self._contains_over_clause(self)

    @staticmethod
    def _resolve_leaf(expr, query, *args, **kwargs):
        if hasattr(expr, "resolve_expression"):
            expr = expr.resolve_expression(query, *args, **kwargs)
        return expr

    @classmethod
    def _resolve_node(cls, node, query, *args, **kwargs):
        if hasattr(node, "children"):
            for child in node.children:
                cls._resolve_node(child, query, *args, **kwargs)
        if hasattr(node, "lhs"):
            node.lhs = cls._resolve_leaf(node.lhs, query, *args, **kwargs)
        if hasattr(node, "rhs"):
            node.rhs = cls._resolve_leaf(node.rhs, query, *args, **kwargs)

    def resolve_expression(self, *args, **kwargs):
        clone = self.clone()
        clone._resolve_node(clone, *args, **kwargs)
        clone.resolved = True
        return clone

    @cached_property
    def output_field(self):
        from django.db.models import BooleanField

        return BooleanField()

    def select_format(self, compiler, sql, params):
        # Wrap filters with a CASE WHEN expression if a database backend
        # (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP
        # BY list.
        if not compiler.connection.features.supports_boolean_expr_in_select_clause:
            sql = f"CASE WHEN {sql} THEN 1 ELSE 0 END"
        return sql, params

    def get_db_converters(self, connection):
        return self.output_field.get_db_converters(connection)

    def get_lookup(self, lookup):
        return self.output_field.get_lookup(lookup)


class NothingNode:
    """A node that matches nothing."""

    contains_aggregate = False

    def as_sql(self, compiler=None, connection=None):
        raise EmptyResultSet


class ExtraWhere:
    # The contents are a black box - assume no aggregates are used.
    contains_aggregate = False

    def __init__(self, sqls, params):
        self.sqls = sqls
        self.params = params

    def as_sql(self, compiler=None, connection=None):
        sqls = ["(%s)" % sql for sql in self.sqls]
        return " AND ".join(sqls), list(self.params or ())


class SubqueryConstraint:
    # Even if aggregates would be used in a subquery, the outer query isn't
    # interested about those.
    contains_aggregate = False

    def __init__(self, alias, columns, targets, query_object):
        self.alias = alias
        self.columns = columns
        self.targets = targets
        query_object.clear_ordering(clear_default=True)
        self.query_object = query_object

    def as_sql(self, compiler, connection):
        query = self.query_object
        query.set_values(self.targets)
        query_compiler = query.get_compiler(connection=connection)
        return query_compiler.as_subquery_condition(self.alias, self.columns, compiler)
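To make the matches-nothing/matches-everything bookkeeping in WhereNode.as_sql() concrete, here is a minimal, framework-free sketch for the non-negated case. The combine() function and the "empty"/"full" child markers are illustrative only; the real code gets these outcomes from compiler.compile() raising EmptyResultSet or returning empty SQL.

def combine(connector, children):
    # Same counters as WhereNode.as_sql(): under AND one impossible child
    # empties the node, under OR every child must be impossible; the
    # symmetric rule applies to children that match everything.
    if connector == "AND":
        full_needed, empty_needed = len(children), 1
    else:  # OR
        full_needed, empty_needed = 1, len(children)
    fragments = []
    for child in children:
        if child == "empty":  # child can't match anything
            empty_needed -= 1
        elif child == "full":  # child matches everything
            full_needed -= 1
        else:
            fragments.append(child)
        if empty_needed == 0:
            return "MATCHES NOTHING"  # as_sql() raises EmptyResultSet
        if full_needed == 0:
            return "MATCHES EVERYTHING"  # as_sql() returns "", []
    return (" %s " % connector).join(fragments)


assert combine("AND", ["a = 1", "empty"]) == "MATCHES NOTHING"
assert combine("OR", ["a = 1", "empty", "b = 2"]) == "a = 1 OR b = 2"
assert combine("OR", ["a = 1", "full"]) == "MATCHES EVERYTHING"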