fix
This commit is contained in:
@@ -0,0 +1,263 @@
|
||||
import warnings
|
||||
|
||||
from ..helpers import quote_string, random_string, stringify_param_value
|
||||
from .commands import AsyncGraphCommands, GraphCommands
|
||||
from .edge import Edge # noqa
|
||||
from .node import Node # noqa
|
||||
from .path import Path # noqa
|
||||
|
||||
# Built-in procedure names used to introspect the graph schema.
DB_LABELS = "DB.LABELS"
# NOTE: the constant name is misspelled ("RAELATIONSHIP"), but its value is
# correct; kept as-is because external code may reference this name.
DB_RAELATIONSHIPTYPES = "DB.RELATIONSHIPTYPES"
DB_PROPERTYKEYS = "DB.PROPERTYKEYS"
|
||||
|
||||
|
||||
class Graph(GraphCommands):
    """
    Graph, collection of nodes and edges.
    """

    def __init__(self, client, name=None):
        """
        Create a new graph.

        Args:
            client: Redis client used to issue graph commands.
            name: graph key; a fresh random name is generated when omitted.
        """
        warnings.warn(
            DeprecationWarning(
                "RedisGraph support is deprecated as of Redis Stack 7.2 \
                (https://redis.com/blog/redisgraph-eol/)"
            )
        )
        if name is None:
            # Generate the default here rather than in the signature: a
            # `name=random_string()` default is evaluated only once, at
            # class-definition time, so every unnamed Graph would share
            # the same key.
            name = random_string()
        self.NAME = name  # Graph key
        self.client = client
        self.execute_command = client.execute_command

        self.nodes = {}
        self.edges = []
        self._labels = []  # List of node labels.
        self._properties = []  # List of properties.
        self._relationship_types = []  # List of relation types.
        self.version = 0  # Graph version

    @property
    def name(self):
        """The graph key this object operates on."""
        return self.NAME

    def _clear_schema(self):
        """Forget the locally cached schema (labels, properties, relations)."""
        self._labels = []
        self._properties = []
        self._relationship_types = []

    def _refresh_schema(self):
        """Reload the complete schema cache from the server."""
        self._clear_schema()
        self._refresh_labels()
        self._refresh_relations()
        self._refresh_attributes()

    def _refresh_labels(self):
        """Fetch node labels from the server into the local cache."""
        lbls = self.labels()

        # Each result row is a single-column record; keep the first column.
        self._labels = [lbl[0] for lbl in lbls]

    def _refresh_relations(self):
        """Fetch relationship types from the server into the local cache."""
        rels = self.relationship_types()

        # Each result row is a single-column record; keep the first column.
        self._relationship_types = [rel[0] for rel in rels]

    def _refresh_attributes(self):
        """Fetch property keys from the server into the local cache."""
        props = self.property_keys()

        # Each result row is a single-column record; keep the first column.
        self._properties = [prop[0] for prop in props]

    def get_label(self, idx):
        """
        Returns a label by its index.

        Args:

            idx:
                The index of the label
        """
        try:
            label = self._labels[idx]
        except IndexError:
            # Cache miss: refresh labels and retry once; a second
            # IndexError propagates to the caller.
            self._refresh_labels()
            label = self._labels[idx]
        return label

    def get_relation(self, idx):
        """
        Returns a relationship type by its index.

        Args:

            idx:
                The index of the relation
        """
        try:
            relationship_type = self._relationship_types[idx]
        except IndexError:
            # Cache miss: refresh relationship types and retry once.
            self._refresh_relations()
            relationship_type = self._relationship_types[idx]
        return relationship_type

    def get_property(self, idx):
        """
        Returns a property by its index.

        Args:

            idx:
                The index of the property
        """
        try:
            p = self._properties[idx]
        except IndexError:
            # Cache miss: refresh properties and retry once.
            self._refresh_attributes()
            p = self._properties[idx]
        return p

    def add_node(self, node):
        """
        Adds a node to the graph.
        """
        if node.alias is None:
            node.alias = random_string()
        self.nodes[node.alias] = node

    def add_edge(self, edge):
        """
        Adds an edge to the graph.

        Raises:
            AssertionError: if either endpoint was not previously added
                via ``add_node``.
        """
        # Membership test instead of direct indexing: indexing would raise
        # KeyError for a missing alias rather than the documented
        # AssertionError.
        if (
            edge.src_node.alias not in self.nodes
            or edge.dest_node.alias not in self.nodes
        ):
            raise AssertionError("Both edge's end must be in the graph")

        self.edges.append(edge)

    def _build_params_header(self, params):
        """Render ``params`` as a leading ``CYPHER k=v ...`` clause."""
        if params is None:
            return ""
        if not isinstance(params, dict):
            raise TypeError("'params' must be a dict")
        # Header starts with "CYPHER"
        params_header = "CYPHER "
        for key, value in params.items():
            params_header += str(key) + "=" + stringify_param_value(value) + " "
        return params_header

    # Procedures.
    def call_procedure(self, procedure, *args, read_only=False, **kwargs):
        """
        Call a stored procedure (e.g. ``db.labels``).

        Args:
            procedure: procedure name.
            *args: procedure arguments; each is quoted as a Cypher string.
            read_only: route the call through a read-only query.
            **kwargs: ``y`` may hold a list of columns to YIELD.
        """
        args = [quote_string(arg) for arg in args]
        q = f"CALL {procedure}({','.join(args)})"

        y = kwargs.get("y", None)
        if y is not None:
            # Leading space is required: without it the YIELD clause is
            # glued to the closing parenthesis and the query is invalid.
            q += f" YIELD {','.join(y)}"

        return self.query(q, read_only=read_only)

    def labels(self):
        """All node labels currently present in the graph."""
        return self.call_procedure(DB_LABELS, read_only=True).result_set

    def relationship_types(self):
        """All relationship types currently present in the graph."""
        return self.call_procedure(DB_RAELATIONSHIPTYPES, read_only=True).result_set

    def property_keys(self):
        """All property keys currently present in the graph."""
        return self.call_procedure(DB_PROPERTYKEYS, read_only=True).result_set
|
||||
|
||||
|
||||
class AsyncGraph(Graph, AsyncGraphCommands):
    """Async version for Graph"""

    async def _refresh_labels(self):
        """Fetch node labels from the server into the local cache."""
        lbls = await self.labels()

        # Each result row is a single-column record; keep the first column.
        self._labels = [lbl[0] for lbl in lbls]

    async def _refresh_attributes(self):
        """Fetch property keys from the server into the local cache."""
        props = await self.property_keys()

        # Each result row is a single-column record; keep the first column.
        self._properties = [prop[0] for prop in props]

    async def _refresh_relations(self):
        """Fetch relationship types from the server into the local cache."""
        rels = await self.relationship_types()

        # Each result row is a single-column record; keep the first column.
        self._relationship_types = [rel[0] for rel in rels]

    async def get_label(self, idx):
        """
        Returns a label by its index.

        Args:

            idx:
                The index of the label
        """
        try:
            label = self._labels[idx]
        except IndexError:
            # Cache miss: refresh labels and retry once.
            await self._refresh_labels()
            label = self._labels[idx]
        return label

    async def get_property(self, idx):
        """
        Returns a property by its index.

        Args:

            idx:
                The index of the property
        """
        try:
            p = self._properties[idx]
        except IndexError:
            # Cache miss: refresh properties and retry once.
            await self._refresh_attributes()
            p = self._properties[idx]
        return p

    async def get_relation(self, idx):
        """
        Returns a relationship type by its index.

        Args:

            idx:
                The index of the relation
        """
        try:
            relationship_type = self._relationship_types[idx]
        except IndexError:
            # Cache miss: refresh relationship types and retry once.
            await self._refresh_relations()
            relationship_type = self._relationship_types[idx]
        return relationship_type

    async def call_procedure(self, procedure, *args, read_only=False, **kwargs):
        """
        Call a stored procedure (async variant of ``Graph.call_procedure``).

        Args:
            procedure: procedure name.
            *args: procedure arguments; each is quoted as a Cypher string.
            read_only: route the call through a read-only query.
            **kwargs: ``y`` may hold a list of columns to YIELD.
        """
        args = [quote_string(arg) for arg in args]
        q = f"CALL {procedure}({','.join(args)})"

        y = kwargs.get("y", None)
        if y is not None:
            # Bug fix: the original built this fragment as a bare
            # expression and discarded it instead of appending to `q`;
            # the leading space keeps the YIELD clause valid Cypher.
            q += f" YIELD {','.join(y)}"
        return await self.query(q, read_only=read_only)

    async def labels(self):
        """All node labels currently present in the graph."""
        return (await self.call_procedure(DB_LABELS, read_only=True)).result_set

    async def property_keys(self):
        """All property keys currently present in the graph."""
        return (await self.call_procedure(DB_PROPERTYKEYS, read_only=True)).result_set

    async def relationship_types(self):
        """All relationship types currently present in the graph."""
        return (
            await self.call_procedure(DB_RAELATIONSHIPTYPES, read_only=True)
        ).result_set
|
||||
@@ -0,0 +1,313 @@
|
||||
from redis import DataError
|
||||
from redis.exceptions import ResponseError
|
||||
|
||||
from .exceptions import VersionMismatchException
|
||||
from .execution_plan import ExecutionPlan
|
||||
from .query_result import AsyncQueryResult, QueryResult
|
||||
|
||||
# Redis command names used by the graph command mixins below.
PROFILE_CMD = "GRAPH.PROFILE"
RO_QUERY_CMD = "GRAPH.RO_QUERY"
QUERY_CMD = "GRAPH.QUERY"
DELETE_CMD = "GRAPH.DELETE"
SLOWLOG_CMD = "GRAPH.SLOWLOG"
CONFIG_CMD = "GRAPH.CONFIG"
LIST_CMD = "GRAPH.LIST"
EXPLAIN_CMD = "GRAPH.EXPLAIN"
|
||||
|
||||
|
||||
class GraphCommands:
    """RedisGraph Commands.

    Mixin providing the GRAPH.* commands; expects the host class to supply
    ``self.name``, ``self.nodes``, ``self.edges``, ``self.execute_command``,
    ``self._build_params_header``, ``self._clear_schema`` and
    ``self._refresh_schema``.
    """

    def commit(self):
        """
        Create entire graph.

        Issues one CREATE query covering every node and edge staged locally
        via ``add_node`` / ``add_edge``. Returns None when nothing is staged.
        """
        if len(self.nodes) == 0 and len(self.edges) == 0:
            return None

        query = "CREATE "
        for _, node in self.nodes.items():
            query += str(node) + ","

        query += ",".join([str(edge) for edge in self.edges])

        # Discard the trailing comma left by the node loop when there are
        # no edges to append.
        if query[-1] == ",":
            query = query[:-1]

        return self.query(query)

    def query(self, q, params=None, timeout=None, read_only=False, profile=False):
        """
        Executes a query against the graph.
        For more information see `GRAPH.QUERY <https://redis.io/commands/graph.query>`_. # noqa

        Args:

        q : str
            The query.
        params : dict
            Query parameters.
        timeout : int
            Maximum runtime for read queries in milliseconds.
        read_only : bool
            Executes a readonly query if set to True.
        profile : bool
            Return details on results produced by and time
            spent in each operation.
        """

        # maintain original 'q' so retries below can rebuild the command
        query = q

        # prepend the CYPHER parameters header, if any
        query = self._build_params_header(params) + query

        # construct query command
        # ask for compact result-set format
        # specify known graph version
        if profile:
            cmd = PROFILE_CMD
        else:
            cmd = RO_QUERY_CMD if read_only else QUERY_CMD
        command = [cmd, self.name, query, "--compact"]

        # include timeout if specified
        if isinstance(timeout, int):
            command.extend(["timeout", timeout])
        elif timeout is not None:
            raise Exception("Timeout argument must be a positive integer")

        # issue query
        try:
            response = self.execute_command(*command)
            return QueryResult(self, response, profile)
        except ResponseError as e:
            # `GRAPH.RO_QUERY` is unavailable in older versions; fall back
            # to the plain (writable) query command once.
            if "unknown command" in str(e) and read_only:
                return self.query(q, params, timeout, read_only=False)
            raise e
        except VersionMismatchException as e:
            # client view over the graph schema is out of sync:
            # adopt the server-reported version, refresh the local schema,
            # then re-issue the query.
            self.version = e.version
            self._refresh_schema()
            # re-issue query
            return self.query(q, params, timeout, read_only)

    def merge(self, pattern):
        """
        Merge pattern.

        Runs a MERGE query for ``pattern`` (a Node, Edge or Path whose
        ``str()`` is a Cypher pattern).
        """
        query = "MERGE "
        query += str(pattern)

        return self.query(query)

    def delete(self):
        """
        Deletes graph.
        For more information see `DELETE <https://redis.io/commands/graph.delete>`_. # noqa
        """
        # The server-side key goes away, so the cached schema is stale.
        self._clear_schema()
        return self.execute_command(DELETE_CMD, self.name)

    # declared here, to override the built in redis.db.flush()
    def flush(self):
        """
        Commit the graph and reset the edges and the nodes to zero length.
        """
        self.commit()
        self.nodes = {}
        self.edges = []

    def bulk(self, **kwargs):
        """Internal only. Not supported."""
        raise NotImplementedError(
            "GRAPH.BULK is internal only. "
            "Use https://github.com/redisgraph/redisgraph-bulk-loader."
        )

    def profile(self, query):
        """
        Execute a query and produce an execution plan augmented with metrics
        for each operation's execution. Return a string representation of a
        query execution plan, with details on results produced by and time
        spent in each operation.
        For more information see `GRAPH.PROFILE <https://redis.io/commands/graph.profile>`_. # noqa
        """
        return self.query(query, profile=True)

    def slowlog(self):
        """
        Get a list containing up to 10 of the slowest queries issued
        against the given graph ID.
        For more information see `GRAPH.SLOWLOG <https://redis.io/commands/graph.slowlog>`_. # noqa

        Each item in the list has the following structure:
        1. A unix timestamp at which the log entry was processed.
        2. The issued command.
        3. The issued query.
        4. The amount of time needed for its execution, in milliseconds.
        """
        return self.execute_command(SLOWLOG_CMD, self.name)

    def config(self, name, value=None, set=False):
        """
        Retrieve or update a RedisGraph configuration.
        For more information see `<https://redis.io/commands/graph.config-get/>`__.

        Args:

        name : str
            The name of the configuration
        value :
            The value we want to set (can be used only when `set` is on)
        set : bool
            Turn on to set a configuration. Default behavior is get.

        Raises:
            DataError: when ``value`` is given without ``set=True``.
        """
        params = ["SET" if set else "GET", name]
        if value is not None:
            if set:
                params.append(value)
            else:
                raise DataError(
                    "``value`` can be provided only when ``set`` is True"
                )  # noqa
        return self.execute_command(CONFIG_CMD, *params)

    def list_keys(self):
        """
        Lists all graph keys in the keyspace.
        For more information see `GRAPH.LIST <https://redis.io/commands/graph.list>`_. # noqa
        """
        return self.execute_command(LIST_CMD)

    def execution_plan(self, query, params=None):
        """
        Get the execution plan for given query,
        GRAPH.EXPLAIN returns an array of operations.

        Args:
            query: the query that will be executed
            params: query parameters

        Returns:
            str: the plan rows joined with newlines.
        """
        query = self._build_params_header(params) + query

        plan = self.execute_command(EXPLAIN_CMD, self.name, query)
        # Replies may arrive as bytes; normalize to str before joining.
        if isinstance(plan[0], bytes):
            plan = [b.decode() for b in plan]
        return "\n".join(plan)

    def explain(self, query, params=None):
        """
        Get the execution plan for given query,
        GRAPH.EXPLAIN returns ExecutionPlan object.
        For more information see `GRAPH.EXPLAIN <https://redis.io/commands/graph.explain>`_. # noqa

        Args:
            query: the query that will be executed
            params: query parameters
        """
        query = self._build_params_header(params) + query

        plan = self.execute_command(EXPLAIN_CMD, self.name, query)
        return ExecutionPlan(plan)
|
||||
|
||||
|
||||
class AsyncGraphCommands(GraphCommands):
    """Async variants of the RedisGraph commands.

    Inherits the synchronous helpers from :class:`GraphCommands` and
    overrides the command-issuing methods as coroutines.
    """

    async def query(self, q, params=None, timeout=None, read_only=False, profile=False):
        """
        Executes a query against the graph.
        For more information see `GRAPH.QUERY <https://oss.redis.com/redisgraph/master/commands/#graphquery>`_. # noqa

        Args:

        q : str
            The query.
        params : dict
            Query parameters.
        timeout : int
            Maximum runtime for read queries in milliseconds.
        read_only : bool
            Executes a readonly query if set to True.
        profile : bool
            Return details on results produced by and time
            spent in each operation.
        """

        # maintain original 'q' so retries below can rebuild the command
        query = q

        # prepend the CYPHER parameters header, if any
        query = self._build_params_header(params) + query

        # construct query command
        # ask for compact result-set format
        # specify known graph version
        if profile:
            cmd = PROFILE_CMD
        else:
            cmd = RO_QUERY_CMD if read_only else QUERY_CMD
        command = [cmd, self.name, query, "--compact"]

        # include timeout if specified
        if isinstance(timeout, int):
            command.extend(["timeout", timeout])
        elif timeout is not None:
            raise Exception("Timeout argument must be a positive integer")

        # issue query
        try:
            response = await self.execute_command(*command)
            return await AsyncQueryResult().initialize(self, response, profile)
        except ResponseError as e:
            # `GRAPH.RO_QUERY` is unavailable in older versions; fall back
            # to the plain (writable) query command once.
            if "unknown command" in str(e) and read_only:
                return await self.query(q, params, timeout, read_only=False)
            raise e
        except VersionMismatchException as e:
            # client view over the graph schema is out of sync:
            # set client version and refresh local schema.
            # NOTE(review): AsyncGraph overrides the _refresh_* helpers as
            # coroutines, yet this call is not awaited, so the refresh
            # likely never executes here — confirm and await if needed.
            self.version = e.version
            self._refresh_schema()
            # re-issue query
            return await self.query(q, params, timeout, read_only)

    async def execution_plan(self, query, params=None):
        """
        Get the execution plan for given query,
        GRAPH.EXPLAIN returns an array of operations.

        Args:
            query: the query that will be executed
            params: query parameters

        Returns:
            str: the plan rows joined with newlines.
        """
        query = self._build_params_header(params) + query

        plan = await self.execute_command(EXPLAIN_CMD, self.name, query)
        # Replies may arrive as bytes; normalize to str before joining.
        if isinstance(plan[0], bytes):
            plan = [b.decode() for b in plan]
        return "\n".join(plan)

    async def explain(self, query, params=None):
        """
        Get the execution plan for given query,
        GRAPH.EXPLAIN returns ExecutionPlan object.

        Args:
            query: the query that will be executed
            params: query parameters
        """
        query = self._build_params_header(params) + query

        plan = await self.execute_command(EXPLAIN_CMD, self.name, query)
        return ExecutionPlan(plan)

    async def flush(self):
        """
        Commit the graph and reset the edges and the nodes to zero length.
        """
        await self.commit()
        self.nodes = {}
        self.edges = []
|
||||
@@ -0,0 +1,91 @@
|
||||
from ..helpers import quote_string
|
||||
from .node import Node
|
||||
|
||||
|
||||
class Edge:
    """
    An edge connecting two nodes.
    """

    def __init__(self, src_node, relation, dest_node, edge_id=None, properties=None):
        """
        Create a new edge.

        Both endpoints are mandatory; ``relation`` and ``properties``
        default to an empty string / empty dict when falsy.
        """
        if src_node is None or dest_node is None:
            # NOTE(bors-42): It makes sense to change AssertionError to
            # ValueError here
            raise AssertionError("Both edge's end must be in the graph")

        self.id = edge_id
        self.src_node = src_node
        self.dest_node = dest_node
        self.relation = relation or ""
        self.properties = properties or {}

    def _props_repr(self):
        """Comma-joined ``key:value`` pairs, sorted by key, values quoted."""
        return ",".join(
            name + ":" + str(quote_string(value))
            for name, value in sorted(self.properties.items())
        )

    def to_string(self):
        """Return the property map as ``{k:v,...}``, or "" when empty."""
        if not self.properties:
            return ""
        return "{" + self._props_repr() + "}"

    def __str__(self):
        """Render as a Cypher pattern: ``(src)-[:REL{props}]->(dst)``."""
        pieces = []

        # Source node; a bare "()" when it is not a Node instance.
        pieces.append(str(self.src_node) if isinstance(self.src_node, Node) else "()")

        # Edge body.
        pieces.append("-[")
        if self.relation:
            pieces.append(":" + self.relation)
        if self.properties:
            pieces.append("{" + self._props_repr() + "}")
        pieces.append("]->")

        # Destination node; a bare "()" when it is not a Node instance.
        pieces.append(str(self.dest_node) if isinstance(self.dest_node, Node) else "()")

        return "".join(pieces)

    def __eq__(self, rhs):
        """Two edges are equal if their IDs match, or if endpoints,
        relation and properties all match."""
        if not isinstance(rhs, Edge):
            return False

        # Quick positive check, if both IDs are set.
        if self.id is not None and rhs.id is not None and self.id == rhs.id:
            return True

        # Otherwise compare endpoints, relation and properties.
        return (
            self.src_node == rhs.src_node
            and self.dest_node == rhs.dest_node
            and self.relation == rhs.relation
            and self.properties == rhs.properties
        )
|
||||
@@ -0,0 +1,3 @@
|
||||
class VersionMismatchException(Exception):
    """Raised when the server reports a graph-schema version newer than
    the client's cached view; carries the server-side version."""

    def __init__(self, version):
        # Keep the reported version so callers can resynchronize.
        self.version = version
|
||||
@@ -0,0 +1,211 @@
|
||||
import re
|
||||
|
||||
|
||||
class ProfileStats:
    """Runtime execution statistics of a single plan operation.

    Attributes:
        records_produced: number of records the operation emitted.
        execution_time: time spent in the operation, in milliseconds.
    """

    def __init__(self, records_produced, execution_time):
        """Store the raw counters reported by GRAPH.PROFILE."""
        self.records_produced = records_produced
        self.execution_time = execution_time
|
||||
|
||||
|
||||
class Operation:
    """
    Operation, a single node within an execution plan tree.
    """

    def __init__(self, name, args=None, profile_stats=None):
        """
        Create a new operation.

        Args:
            name: string that represents the name of the operation
            args: operation arguments
            profile_stats: profile statistics
        """
        self.name = name
        self.args = args
        self.profile_stats = profile_stats
        self.children = []

    def append_child(self, child):
        """Attach ``child`` below this operation; returns self for chaining."""
        # Reject non-operations and self-references (which would loop).
        if child is self or not isinstance(child, Operation):
            raise Exception("child must be Operation")

        self.children.append(child)
        return self

    def child_count(self):
        """Number of direct children of this operation."""
        return len(self.children)

    def __eq__(self, o: object) -> bool:
        """Equality considers only name and args, not children or stats."""
        if not isinstance(o, Operation):
            return False
        return self.name == o.name and self.args == o.args

    def __str__(self) -> str:
        """Render as ``Name`` or ``Name | args``."""
        if self.args is None:
            return f"{self.name}"
        return f"{self.name} | {self.args}"
|
||||
|
||||
|
||||
class ExecutionPlan:
    """
    ExecutionPlan, collection of operations parsed from GRAPH.EXPLAIN /
    GRAPH.PROFILE output into a tree of :class:`Operation` objects.
    """

    def __init__(self, plan):
        """
        Create a new execution plan.

        Args:
            plan: array of strings that represents the collection operations
                the output from GRAPH.EXPLAIN

        Raises:
            Exception: when ``plan`` is not a list.
        """
        if not isinstance(plan, list):
            raise Exception("plan must be an array")

        # Replies may arrive as bytes; normalize to str before parsing.
        if isinstance(plan[0], bytes):
            plan = [b.decode() for b in plan]

        self.plan = plan
        # Parse the flat, indentation-encoded rows into an Operation tree.
        self.structured_plan = self._operation_tree()

    def _compare_operations(self, root_a, root_b):
        """
        Compare execution plan operation tree

        Return: True if operation trees are equal, False otherwise
        """

        # compare current root (Operation.__eq__: name and args only)
        if root_a != root_b:
            return False

        # make sure root have the same number of children
        if root_a.child_count() != root_b.child_count():
            return False

        # recursively compare children, position by position
        for i in range(root_a.child_count()):
            if not self._compare_operations(root_a.children[i], root_b.children[i]):
                return False

        return True

    def __str__(self) -> str:
        # NOTE(review): "aggraget" is a typo for "aggregate"; harmless since
        # it is a local helper name only.
        def aggraget_str(str_children):
            # Indent every child line one level deeper than its parent.
            return "\n".join(
                [
                    " " + line
                    for str_child in str_children
                    for line in str_child.splitlines()
                ]
            )

        def combine_str(x, y):
            return f"{x}\n{y}"

        return self._operation_traverse(
            self.structured_plan, str, aggraget_str, combine_str
        )

    def __eq__(self, o: object) -> bool:
        """Compares two execution plans

        Return: True if the two plans are equal False otherwise
        """
        # make sure 'o' is an execution-plan
        if not isinstance(o, ExecutionPlan):
            return False

        # get root for both plans
        root_a = self.structured_plan
        root_b = o.structured_plan

        # compare execution trees
        return self._compare_operations(root_a, root_b)

    def _operation_traverse(self, op, op_f, aggregate_f, combine_f):
        """
        Traverse operation tree recursively applying functions

        Args:
            op: operation to traverse
            op_f: function applied for each operation
            aggregate_f: aggregation function applied for all children of a single operation
            combine_f: combine function applied for the operation result and the children result
        """  # noqa
        # apply op_f for each operation
        op_res = op_f(op)
        if len(op.children) == 0:
            return op_res  # no children return
        else:
            # apply _operation_traverse recursively on every child
            children = [
                self._operation_traverse(child, op_f, aggregate_f, combine_f)
                for child in op.children
            ]
            # combine the operation result with the children aggregated result
            return combine_f(op_res, aggregate_f(children))

    def _operation_tree(self):
        """Build the operation tree from the string representation"""

        # initial state
        i = 0
        level = 0
        stack = []
        current = None

        def _create_operation(args):
            # args: one plan row split on "|" -> [name, arguments?, stats?]
            profile_stats = None
            name = args[0].strip()
            args.pop(0)
            if len(args) > 0 and "Records produced" in args[-1]:
                # GRAPH.PROFILE rows carry trailing statistics; strip and
                # parse them into a ProfileStats object.
                records_produced = int(
                    re.search("Records produced: (\\d+)", args[-1]).group(1)
                )
                # NOTE(review): the "." in "\\d+.\\d+" is an unescaped
                # any-character; "\\d+\\.\\d+" was likely intended, though
                # it matches the expected input either way.
                execution_time = float(
                    re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1)
                )
                profile_stats = ProfileStats(records_produced, execution_time)
                args.pop(-1)
            return Operation(
                name, None if len(args) == 0 else args[0].strip(), profile_stats
            )

        # iterate plan operations
        while i < len(self.plan):
            current_op = self.plan[i]
            # Nesting depth is derived from the row's indentation.
            # NOTE(review): count() scans the whole row, not only the
            # leading whitespace — confirm against actual GRAPH.EXPLAIN
            # output formatting.
            op_level = current_op.count(" ")
            if op_level == level:
                # if the operation level equal to the current level
                # set the current operation and move next
                child = _create_operation(current_op.split("|"))
                if current:
                    # Sibling: reattach to the parent on top of the stack.
                    # NOTE(review): pop() removes the parent permanently;
                    # verify siblings-of-siblings at the same level are
                    # handled (stack may underflow at the final return).
                    current = stack.pop()
                    current.append_child(child)
                current = child
                i += 1
            elif op_level == level + 1:
                # if the operation is child of the current operation
                # add it as child and set as current operation
                child = _create_operation(current_op.split("|"))
                current.append_child(child)
                stack.append(current)
                current = child
                level += 1
                i += 1
            elif op_level < level:
                # if the operation is not child of current operation
                # go back to it's parent operation; the row itself is
                # re-examined on the next iteration (i is not advanced)
                levels_back = level - op_level + 1
                for _ in range(levels_back):
                    current = stack.pop()
                level -= levels_back
            else:
                # depth jumped by more than one level: malformed input
                raise Exception("corrupted plan")
        # NOTE(review): assumes the parse leaves the root on the stack;
        # a single-row plan (stack stays empty) would raise IndexError.
        return stack[0]
|
||||
@@ -0,0 +1,88 @@
|
||||
from ..helpers import quote_string
|
||||
|
||||
|
||||
class Node:
    """
    A node within the graph.
    """

    def __init__(self, node_id=None, alias=None, label=None, properties=None):
        """
        Create a new node.

        ``label`` may be None, a single string, or a list of strings;
        empty-string labels are discarded.
        """
        self.id = node_id
        self.alias = alias

        # Drop empty-string entries before classifying the label argument.
        if isinstance(label, list):
            label = [item for item in label if item != ""]

        if label is None or label == "" or (isinstance(label, list) and not label):
            # No usable label at all.
            self.label = None
            self.labels = None
        elif isinstance(label, str):
            self.label = label
            self.labels = [label]
        elif isinstance(label, list) and all(
            isinstance(item, str) for item in label
        ):
            # Multi-label node: `label` keeps the first for compatibility.
            self.label = label[0]
            self.labels = label
        else:
            raise AssertionError(
                "label should be either None, string or a list of strings"
            )

        self.properties = properties or {}

    def _props_repr(self):
        """Comma-joined ``key:value`` pairs, sorted by key, values quoted."""
        return ",".join(
            name + ":" + str(quote_string(value))
            for name, value in sorted(self.properties.items())
        )

    def to_string(self):
        """Return the property map as ``{k:v,...}``, or "" when empty."""
        if not self.properties:
            return ""
        return "{" + self._props_repr() + "}"

    def __str__(self):
        """Render as a Cypher pattern: ``(alias:Label1:Label2{props})``."""
        pieces = ["("]
        if self.alias:
            pieces.append(self.alias)
        if self.labels:
            pieces.append(":" + ":".join(self.labels))
        if self.properties:
            pieces.append("{" + self._props_repr() + "}")
        pieces.append(")")
        return "".join(pieces)

    def __eq__(self, rhs):
        """Nodes are equal when IDs don't conflict and label and
        properties match (alias is ignored)."""
        if not isinstance(rhs, Node):
            return False

        # Quick negative check, if both IDs are set and differ.
        if self.id is not None and rhs.id is not None and self.id != rhs.id:
            return False

        return self.label == rhs.label and self.properties == rhs.properties
|
||||
@@ -0,0 +1,78 @@
|
||||
from .edge import Edge
|
||||
from .node import Node
|
||||
|
||||
|
||||
class Path:
    """An alternating sequence of nodes and edges forming a graph path."""

    def __init__(self, nodes, edges):
        """Create a path from pre-built node and edge lists."""
        if not (isinstance(nodes, list) and isinstance(edges, list)):
            raise TypeError("nodes and edges must be list")

        self._nodes = nodes
        self._edges = edges
        # Paths alternate node, edge, node, ...; append_type tracks which
        # element type must be appended next (a path starts with a node).
        self.append_type = Node

    @classmethod
    def new_empty_path(cls):
        """Alternate constructor: a path with no nodes and no edges."""
        return cls([], [])

    def nodes(self):
        """All nodes of the path, in order."""
        return self._nodes

    def edges(self):
        """All edges of the path, in order."""
        return self._edges

    def get_node(self, index):
        """Node at position ``index``."""
        return self._nodes[index]

    def get_relationship(self, index):
        """Edge at position ``index``."""
        return self._edges[index]

    def first_node(self):
        """First node of the path."""
        return self._nodes[0]

    def last_node(self):
        """Last node of the path."""
        return self._nodes[-1]

    def edge_count(self):
        """Number of edges in the path."""
        return len(self._edges)

    def nodes_count(self):
        """Number of nodes in the path."""
        return len(self._nodes)

    def add_node(self, node):
        """Append a node; only legal when a node is expected next."""
        if not isinstance(node, self.append_type):
            raise AssertionError("Add Edge before adding Node")
        self._nodes.append(node)
        self.append_type = Edge
        return self

    def add_edge(self, edge):
        """Append an edge; only legal when an edge is expected next."""
        if not isinstance(edge, self.append_type):
            raise AssertionError("Add Node before adding Edge")
        self._edges.append(edge)
        self.append_type = Node
        return self

    def __eq__(self, other):
        """Paths are equal when their node and edge lists are equal."""
        if not isinstance(other, Path):
            return False
        return self.nodes() == other.nodes() and self.edges() == other.edges()

    def __str__(self):
        """Render like ``<(1)-[2]->(3)>``; arrow direction is taken from
        each edge's ``src_node``."""
        pieces = ["<"]
        total_edges = self.edge_count()
        for idx in range(total_edges):
            node_id = self.get_node(idx).id
            pieces.append("(" + str(node_id) + ")")
            edge = self.get_relationship(idx)
            if edge.src_node == node_id:
                pieces.append("-[" + str(int(edge.id)) + "]->")
            else:
                pieces.append("<-[" + str(int(edge.id)) + "]-")
        pieces.append("(" + str(self.get_node(total_edges).id) + ")")
        pieces.append(">")
        return "".join(pieces)
|
||||
@@ -0,0 +1,588 @@
|
||||
import sys
|
||||
from collections import OrderedDict
|
||||
|
||||
# from prettytable import PrettyTable
|
||||
from redis import ResponseError
|
||||
|
||||
from .edge import Edge
|
||||
from .exceptions import VersionMismatchException
|
||||
from .node import Node
|
||||
from .path import Path
|
||||
|
||||
# Keys under which GRAPH.QUERY reports execution statistics in its reply.
LABELS_ADDED = "Labels added"
LABELS_REMOVED = "Labels removed"
NODES_CREATED = "Nodes created"
NODES_DELETED = "Nodes deleted"
RELATIONSHIPS_DELETED = "Relationships deleted"
PROPERTIES_SET = "Properties set"
PROPERTIES_REMOVED = "Properties removed"
RELATIONSHIPS_CREATED = "Relationships created"
INDICES_CREATED = "Indices created"
INDICES_DELETED = "Indices deleted"
CACHED_EXECUTION = "Cached execution"
INTERNAL_EXECUTION_TIME = "internal execution time"

# Every statistic field QueryResult.parse_statistics() scans for.
STATS = [
    LABELS_ADDED,
    LABELS_REMOVED,
    NODES_CREATED,
    PROPERTIES_SET,
    PROPERTIES_REMOVED,
    RELATIONSHIPS_CREATED,
    NODES_DELETED,
    RELATIONSHIPS_DELETED,
    INDICES_CREATED,
    INDICES_DELETED,
    CACHED_EXECUTION,
    INTERNAL_EXECUTION_TIME,
]
||||
|
||||
|
||||
class ResultSetColumnTypes:
    """Column type codes appearing in a result-set header entry."""

    COLUMN_UNKNOWN = 0
    COLUMN_SCALAR = 1
    COLUMN_NODE = 2  # Unused as of RedisGraph v2.1.0, retained for backwards compatibility.  # noqa
    COLUMN_RELATION = 3  # Unused as of RedisGraph v2.1.0, retained for backwards compatibility.  # noqa
|
||||
|
||||
class ResultSetScalarTypes:
    """Scalar value type codes, sent as the first element of a scalar cell."""

    VALUE_UNKNOWN = 0
    VALUE_NULL = 1
    VALUE_STRING = 2
    VALUE_INTEGER = 3
    VALUE_BOOLEAN = 4
    VALUE_DOUBLE = 5
    VALUE_ARRAY = 6
    VALUE_EDGE = 7
    VALUE_NODE = 8
    VALUE_PATH = 9
    VALUE_MAP = 10
    VALUE_POINT = 11
|
||||
|
||||
class QueryResult:
    """
    Parsed result of a GRAPH.QUERY / GRAPH.PROFILE command.

    Exposes the decoded records (``result_set``), the column ``header``
    and the execution ``statistics`` reported by the server.
    """

    def __init__(self, graph, response, profile=False):
        """
        A class that represents a result of the query operation.

        Args:

            graph:
                The graph on which the query was executed.
            response:
                The response from the server.
            profile:
                A boolean indicating if the query command was "GRAPH.PROFILE"
        """
        self.graph = graph
        self.header = []
        self.result_set = []

        # in case of an error an exception will be raised
        self._check_for_errors(response)

        if len(response) == 1:
            # Statistics-only reply (e.g. a pure write query).
            self.parse_statistics(response[0])
        elif profile:
            self.parse_profile(response)
        else:
            # start by parsing statistics, matches the one we have
            self.parse_statistics(response[-1])  # Last element.
            self.parse_results(response)

    def _check_for_errors(self, response):
        """
        Check if the response contains an error and raise it if so.
        """
        if isinstance(response[0], ResponseError):
            error = response[0]
            if str(error) == "version mismatch":
                # Server reports its graph version as the second element.
                version = response[1]
                error = VersionMismatchException(version)
            raise error

        # If we encountered a run-time error, the last response
        # element will be an exception
        if isinstance(response[-1], ResponseError):
            raise response[-1]

    def parse_results(self, raw_result_set):
        """
        Parse the query execution result returned from the server.
        """
        self.header = self.parse_header(raw_result_set)

        # Empty header.
        if len(self.header) == 0:
            return

        self.result_set = self.parse_records(raw_result_set)

    def parse_statistics(self, raw_statistics):
        """
        Parse the statistics returned in the response into ``self.statistics``.
        """
        self.statistics = {}

        # decode statistics
        for idx, stat in enumerate(raw_statistics):
            if isinstance(stat, bytes):
                raw_statistics[idx] = stat.decode()

        for s in STATS:
            v = self._get_value(s, raw_statistics)
            if v is not None:
                self.statistics[s] = v

    def parse_header(self, raw_result_set):
        """
        Parse the header of the result.

        Returns an array of column type / column name pairs.
        """
        header = raw_result_set[0]
        return header

    def parse_records(self, raw_result_set):
        """
        Parses the result set and returns a list of records.

        Each cell is dispatched on its column type from the header.
        """
        records = [
            [
                self.parse_record_types[self.header[idx][0]](cell)
                for idx, cell in enumerate(row)
            ]
            for row in raw_result_set[1]
        ]

        return records

    def parse_entity_properties(self, props):
        """
        Parse node / edge properties.
        """
        # [[name, value type, value] X N]
        properties = {}
        for prop in props:
            # Property names are sent as string-table offsets.
            prop_name = self.graph.get_property(prop[0])
            prop_value = self.parse_scalar(prop[1:])
            properties[prop_name] = prop_value

        return properties

    def parse_string(self, cell):
        """
        Parse the cell as a string.
        """
        if isinstance(cell, bytes):
            return cell.decode()
        elif not isinstance(cell, str):
            return str(cell)
        else:
            return cell

    def parse_node(self, cell):
        """
        Parse the cell to a node.
        """
        # Node ID (integer),
        # [label string offset (integer)],
        # [[name, value type, value] X N]

        node_id = int(cell[0])
        labels = None
        if len(cell[1]) > 0:
            labels = []
            for inner_label in cell[1]:
                labels.append(self.graph.get_label(inner_label))
        properties = self.parse_entity_properties(cell[2])
        return Node(node_id=node_id, label=labels, properties=properties)

    def parse_edge(self, cell):
        """
        Parse the cell to an edge.
        """
        # Edge ID (integer),
        # reltype string offset (integer),
        # src node ID offset (integer),
        # dest node ID offset (integer),
        # [[name, value, value type] X N]

        edge_id = int(cell[0])
        relation = self.graph.get_relation(cell[1])
        src_node_id = int(cell[2])
        dest_node_id = int(cell[3])
        properties = self.parse_entity_properties(cell[4])
        return Edge(
            src_node_id, relation, dest_node_id, edge_id=edge_id, properties=properties
        )

    def parse_path(self, cell):
        """
        Parse the cell to a path.
        """
        # A path cell is a pair of scalar arrays: nodes and edges.
        nodes = self.parse_scalar(cell[0])
        edges = self.parse_scalar(cell[1])
        return Path(nodes, edges)

    def parse_map(self, cell):
        """
        Parse the cell as a map, preserving server-side key order.
        """
        m = OrderedDict()
        n_entries = len(cell)

        # A map is an array of key value pairs.
        # 1. key (string)
        # 2. array: (value type, value)
        for i in range(0, n_entries, 2):
            key = self.parse_string(cell[i])
            m[key] = self.parse_scalar(cell[i + 1])

        return m

    def parse_point(self, cell):
        """
        Parse the cell to point.
        """
        p = {}
        # A point is received an array of the form: [latitude, longitude]
        # It is returned as a map of the form: {"latitude": latitude, "longitude": longitude}  # noqa
        p["latitude"] = float(cell[0])
        p["longitude"] = float(cell[1])
        return p

    def parse_null(self, cell):
        """
        Parse a null value.
        """
        return None

    def parse_integer(self, cell):
        """
        Parse the integer value from the cell.
        """
        return int(cell)

    def parse_boolean(self, value):
        """
        Parse the cell value as a boolean.

        Returns None (and logs to stderr) on an unrecognized value.
        """
        value = value.decode() if isinstance(value, bytes) else value
        try:
            scalar = True if strtobool(value) else False
        except ValueError:
            sys.stderr.write("unknown boolean type\n")
            scalar = None
        return scalar

    def parse_double(self, cell):
        """
        Parse the cell as a double.
        """
        return float(cell)

    def parse_array(self, value):
        """
        Parse an array of values.
        """
        # Each element is itself a (type, value) scalar pair.
        scalar = [self.parse_scalar(element) for element in value]
        return scalar

    def parse_unknown(self, cell):
        """
        Parse a cell of unknown type.
        """
        sys.stderr.write("Unknown type\n")
        return None

    def parse_scalar(self, cell):
        """
        Parse a scalar value from a cell in the result set.
        """
        scalar_type = int(cell[0])
        value = cell[1]
        scalar = self.parse_scalar_types[scalar_type](value)

        return scalar

    def parse_profile(self, response):
        """
        Parse a GRAPH.PROFILE reply: keep each plan line up to its first
        comma (the operation name), stripped of surrounding whitespace.
        """
        # Use split instead of index so a line without a comma is kept
        # whole rather than raising ValueError.
        self.result_set = [x.split(",")[0].strip() for x in response]

    def is_empty(self):
        """Return True when the result set holds no records."""
        return len(self.result_set) == 0

    @staticmethod
    def _get_value(prop, statistics):
        """
        Extract the numeric value of statistic *prop* from the raw
        statistics strings, or None if it is not present.
        """
        for stat in statistics:
            if prop in stat:
                # e.g. "Nodes created: 2" -> 2.0
                return float(stat.split(": ")[1].split(" ")[0])

        return None

    def _get_stat(self, stat):
        """Return the statistic value, defaulting to 0 when absent."""
        return self.statistics.get(stat, 0)

    @property
    def labels_added(self):
        """Returns the number of labels added in the query"""
        return self._get_stat(LABELS_ADDED)

    @property
    def labels_removed(self):
        """Returns the number of labels removed in the query"""
        return self._get_stat(LABELS_REMOVED)

    @property
    def nodes_created(self):
        """Returns the number of nodes created in the query"""
        return self._get_stat(NODES_CREATED)

    @property
    def nodes_deleted(self):
        """Returns the number of nodes deleted in the query"""
        return self._get_stat(NODES_DELETED)

    @property
    def properties_set(self):
        """Returns the number of properties set in the query"""
        return self._get_stat(PROPERTIES_SET)

    @property
    def properties_removed(self):
        """Returns the number of properties removed in the query"""
        return self._get_stat(PROPERTIES_REMOVED)

    @property
    def relationships_created(self):
        """Returns the number of relationships created in the query"""
        return self._get_stat(RELATIONSHIPS_CREATED)

    @property
    def relationships_deleted(self):
        """Returns the number of relationships deleted in the query"""
        return self._get_stat(RELATIONSHIPS_DELETED)

    @property
    def indices_created(self):
        """Returns the number of indices created in the query"""
        return self._get_stat(INDICES_CREATED)

    @property
    def indices_deleted(self):
        """Returns the number of indices deleted in the query"""
        return self._get_stat(INDICES_DELETED)

    @property
    def cached_execution(self):
        """Returns whether or not the query execution plan was cached"""
        return self._get_stat(CACHED_EXECUTION) == 1

    @property
    def run_time_ms(self):
        """Returns the server execution time of the query"""
        return self._get_stat(INTERNAL_EXECUTION_TIME)

    @property
    def parse_scalar_types(self):
        """Maps scalar type codes to their parser callbacks."""
        return {
            ResultSetScalarTypes.VALUE_NULL: self.parse_null,
            ResultSetScalarTypes.VALUE_STRING: self.parse_string,
            ResultSetScalarTypes.VALUE_INTEGER: self.parse_integer,
            ResultSetScalarTypes.VALUE_BOOLEAN: self.parse_boolean,
            ResultSetScalarTypes.VALUE_DOUBLE: self.parse_double,
            ResultSetScalarTypes.VALUE_ARRAY: self.parse_array,
            ResultSetScalarTypes.VALUE_NODE: self.parse_node,
            ResultSetScalarTypes.VALUE_EDGE: self.parse_edge,
            ResultSetScalarTypes.VALUE_PATH: self.parse_path,
            ResultSetScalarTypes.VALUE_MAP: self.parse_map,
            ResultSetScalarTypes.VALUE_POINT: self.parse_point,
            ResultSetScalarTypes.VALUE_UNKNOWN: self.parse_unknown,
        }

    @property
    def parse_record_types(self):
        """Maps column type codes to their parser callbacks."""
        return {
            ResultSetColumnTypes.COLUMN_SCALAR: self.parse_scalar,
            ResultSetColumnTypes.COLUMN_NODE: self.parse_node,
            ResultSetColumnTypes.COLUMN_RELATION: self.parse_edge,
            ResultSetColumnTypes.COLUMN_UNKNOWN: self.parse_unknown,
        }
||||
|
||||
|
||||
class AsyncQueryResult(QueryResult):
    """
    Async version for the QueryResult class - a class that
    represents a result of the query operation.

    Overrides the parsing methods that may need to await graph
    metadata lookups; the remaining sync parsers are inherited.
    """

    def __init__(self):
        """
        To init the class you must call self.initialize()
        """
        # Construction is split from initialization because __init__
        # cannot be awaited.
        pass

    async def initialize(self, graph, response, profile=False):
        """
        Initializes the class.
        Args:

            graph:
                The graph on which the query was executed.
            response:
                The response from the server.
            profile:
                A boolean indicating if the query command was "GRAPH.PROFILE"

        Returns:
            This instance, for call chaining.
        """
        self.graph = graph
        self.header = []
        self.result_set = []

        # in case of an error an exception will be raised
        self._check_for_errors(response)

        if len(response) == 1:
            self.parse_statistics(response[0])
        elif profile:
            self.parse_profile(response)
        else:
            # start by parsing statistics, matches the one we have
            self.parse_statistics(response[-1])  # Last element.
            await self.parse_results(response)

        return self

    async def parse_node(self, cell):
        """
        Parses a node from the cell.
        """
        # Node ID (integer),
        # [label string offset (integer)],
        # [[name, value type, value] X N]

        labels = None
        if len(cell[1]) > 0:
            labels = []
            for inner_label in cell[1]:
                # Label offsets may require an async server lookup.
                labels.append(await self.graph.get_label(inner_label))
        properties = await self.parse_entity_properties(cell[2])
        node_id = int(cell[0])
        return Node(node_id=node_id, label=labels, properties=properties)

    async def parse_scalar(self, cell):
        """
        Parses a scalar value from the server response.
        """
        scalar_type = int(cell[0])
        value = cell[1]
        try:
            scalar = await self.parse_scalar_types[scalar_type](value)
        except TypeError:
            # Not all of the functions are async
            scalar = self.parse_scalar_types[scalar_type](value)

        return scalar

    async def parse_records(self, raw_result_set):
        """
        Parses the result set and returns a list of records.
        """
        records = []
        for row in raw_result_set[1]:
            record = [
                await self.parse_record_types[self.header[idx][0]](cell)
                for idx, cell in enumerate(row)
            ]
            records.append(record)

        return records

    async def parse_results(self, raw_result_set):
        """
        Parse the query execution result returned from the server.
        """
        self.header = self.parse_header(raw_result_set)

        # Empty header.
        if len(self.header) == 0:
            return

        self.result_set = await self.parse_records(raw_result_set)

    async def parse_entity_properties(self, props):
        """
        Parse node / edge properties.
        """
        # [[name, value type, value] X N]
        properties = {}
        for prop in props:
            prop_name = await self.graph.get_property(prop[0])
            prop_value = await self.parse_scalar(prop[1:])
            properties[prop_name] = prop_value

        return properties

    async def parse_edge(self, cell):
        """
        Parse the cell to an edge.
        """
        # Edge ID (integer),
        # reltype string offset (integer),
        # src node ID offset (integer),
        # dest node ID offset (integer),
        # [[name, value, value type] X N]

        edge_id = int(cell[0])
        relation = await self.graph.get_relation(cell[1])
        src_node_id = int(cell[2])
        dest_node_id = int(cell[3])
        properties = await self.parse_entity_properties(cell[4])
        return Edge(
            src_node_id, relation, dest_node_id, edge_id=edge_id, properties=properties
        )

    async def parse_path(self, cell):
        """
        Parse the cell to a path.
        """
        nodes = await self.parse_scalar(cell[0])
        edges = await self.parse_scalar(cell[1])
        return Path(nodes, edges)

    async def parse_map(self, cell):
        """
        Parse the cell to a map.
        """
        m = OrderedDict()
        n_entries = len(cell)

        # A map is an array of key value pairs.
        # 1. key (string)
        # 2. array: (value type, value)
        for i in range(0, n_entries, 2):
            key = self.parse_string(cell[i])
            m[key] = await self.parse_scalar(cell[i + 1])

        return m

    async def parse_array(self, value):
        """
        Parse array value.
        """
        scalar = [await self.parse_scalar(value[i]) for i in range(len(value))]
        return scalar
||||
|
||||
|
||||
def strtobool(val):
    """
    Convert a string representation of truth to true (1) or false (0).
    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
    'val' is anything else.
    """
    normalized = val.lower()
    if normalized in ("n", "no", "f", "false", "off", "0"):
        return False
    if normalized in ("y", "yes", "t", "true", "on", "1"):
        return True
    raise ValueError(f"invalid truth value {val!r}")
|
||||
Reference in New Issue
Block a user