Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions noxfile.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
import nox
import sys
# v = sys.version.split(" ")[0]

# Reuse environments to speed things up locally (optional)
nox.options.reuse_venv = "yes"
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

setup(
name="tensor-shape-assert",
version="0.3.0",
version="0.3.1",
description="A simple runtime assert library for tensor-based frameworks.",
long_description=long_description,
long_description_content_type="text/markdown",
Expand Down
18 changes: 15 additions & 3 deletions src/tensor_shape_assert/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,18 @@
from .wrapper import (
check_tensor_shapes, get_shape_variables, assert_shape_here,
check_tensor_shapes,
get_shape_variables,
assert_shape_here,
set_global_check_mode
)
from .types import ShapedTensor, ShapedTorchLiteral, ShapedNumpyLiteral, ShapedLiteral
from .types import ScalarTensor # type: ignore
from .types import (
ShapedTensor,
ShapedTorchLiteral,
ShapedNumpyLiteral,
ShapedLiteral
)
from .types import ScalarTensor # type: ignore
from .trace import (
start_trace_recording,
stop_trace_recording,
trace_records_to_string
)
2 changes: 2 additions & 0 deletions src/tensor_shape_assert/descriptor.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,8 @@ def descriptor_to_variables(shape_descriptor, shape, variables=None):
)

if resulting_value is not None and not isinstance(desc_item, int):
if not isinstance(resulting_value, int):
resulting_value = tuple(resulting_value)
variables[desc_item] = resulting_value

return variables
122 changes: 122 additions & 0 deletions src/tensor_shape_assert/trace.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import inspect

from typing import NamedTuple
from .types import VariablesType


class TracedVariableAssignment(NamedTuple):
    """One recorded shape-check event: a single annotated tensor together
    with the shape-variable bindings in effect when it was checked."""

    # variable name as seen in the checked function (None when unknown)
    name: "str | None"
    # the shape annotation text for that variable (None when unknown)
    annotation: "str | None"
    # the concrete runtime shape of the tensor
    shape: "tuple[int, ...]"
    # snapshot of resolved shape-variable values at check time
    assignments: "VariablesType"

    def __str__(self) -> str:
        head = f"{self.name} : ({self.annotation})"
        return f"{head} -> shape {self.shape} => {self.assignments}"

class TracedFunctionCall(NamedTuple):
    """Identity of one traced call to a shape-checked function."""

    # ``__name__`` of the wrapped function (None when unavailable)
    function_name: "str | None"
    # source file the function was defined in (None when unavailable)
    file: "str | None"
    # first source line of the function definition
    line: int
    # nesting depth of this call within the traced call stack
    stack_index: int
    # global, monotonically increasing index of this call
    call_index: int

    def __str__(self) -> str:
        location = f"defined at {self.file}:{self.line}"
        indices = f"stack index: {self.stack_index}, call index: {self.call_index}"
        return f"{self.function_name} ({location}), {indices}"

class TraceRecord(NamedTuple):
    """One entry in the trace log: which traced function call produced
    which variable-assignment snapshot."""
    # function metadata
    function: TracedFunctionCall
    # the shape check / variable binding recorded during that call
    assignment: TracedVariableAssignment


# Module-level trace state (not thread-safe — single interpreter-wide trace).
# Currently-active (entered but not yet finalized) calls, innermost last.
_trace_stack: list[TracedFunctionCall] = []
# Everything recorded since recording started; drained by stop_trace_recording.
_trace_records: list[TraceRecord] = []
# Toggled by start_trace_recording / stop_trace_recording.
_trace_enabled: bool = False

def add_function_trace(fn):
    """Push a trace entry for ``fn`` onto the active-call stack.

    No-op unless tracing has been enabled via ``start_trace_recording``.

    Args:
        fn: the function object being entered by the shape-check wrapper.
    """
    if not _trace_enabled:
        return

    # inspect raises TypeError (builtins / C extensions) or OSError (source
    # not retrievable, e.g. REPL-defined functions) — fall back gracefully
    # instead of crashing the traced call.
    try:
        source_file = inspect.getsourcefile(fn)
        source_line = inspect.getsourcelines(fn)[1]
    except (OSError, TypeError):
        source_file, source_line = None, -1

    _trace_stack.append(
        TracedFunctionCall(
            function_name=getattr(fn, "__name__", None),
            file=source_file,
            line=source_line,
            stack_index=len(_trace_stack),
            call_index=len(_trace_records)
        )
    )

def add_assignment_trace(
    name: str | None,
    annotation: str | None,
    shape: tuple[int, ...],
    assignments: VariablesType
):
    """Record one variable assignment under the innermost traced call."""
    if not _trace_enabled:
        return
    if not _trace_stack:
        raise RuntimeError(
            "Internal error: Tried to add assignment trace without an active "
            "function trace."
        )

    record = TraceRecord(
        function=_trace_stack[-1],
        assignment=TracedVariableAssignment(
            name=name,
            annotation=annotation,
            shape=shape,
            # snapshot: the caller may keep mutating its bindings dict
            assignments=assignments.copy()
        )
    )
    _trace_records.append(record)

def finalize_function_trace():
    """Pop the innermost traced call; no-op when tracing is disabled."""
    if not _trace_enabled:
        return
    if not _trace_stack:
        raise RuntimeError(
            "Internal error: Tried to finalize function trace without an active "
            "function trace."
        )
    _trace_stack.pop()


def start_trace_recording():
    """Enable trace recording; subsequent shape checks will be logged."""
    global _trace_enabled
    _trace_enabled = True

def stop_trace_recording() -> list[TraceRecord]:
    """Disable tracing and return everything recorded so far.

    The internal record buffer is drained, so each recording session
    starts empty.
    """
    global _trace_enabled
    _trace_enabled = False
    collected = list(_trace_records)
    _trace_records.clear()
    return collected

def trace_records_to_string(records: "list[TraceRecord]") -> str:
    """Render trace records as an indented, human-readable call tree.

    Each distinct function call prints a header line once, followed by one
    line per recorded assignment; nesting depth is shown with ``"| "``
    prefixes.

    Args:
        records: the list returned by ``stop_trace_recording``.

    Returns:
        A newline-joined string; empty string for an empty record list.
    """
    lines = []
    # Key the header on the unique call_index of the producing call. The
    # previous implementation only compared stack depths, so a sibling call
    # at the same (or a shallower) depth never got its own header and its
    # assignments were visually attributed to the wrong function.
    last_call_index = None
    for record in records:
        indentation = "| " * record.function.stack_index

        if record.function.call_index != last_call_index:
            lines.append(f"{indentation}\n{indentation}{record.function}")
            last_call_index = record.function.call_index

        lines.append(f"{indentation}| {record.assignment}")

    return "\n".join(lines)
10 changes: 10 additions & 0 deletions src/tensor_shape_assert/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
clean_up_descriptor
)

VariablesType = dict[str, tuple[int] | int]

# define str subclasses to identify shape descriptors

_NAME_TO_KIND = {
Expand Down Expand Up @@ -144,6 +146,13 @@ def __class_getitem__(cls, key):
type_params=(T, S)
)

# TODO: this can be made more useful by using a library-specific scalar type
ScalarTensor = TypeAliasType(
'ScalarTensor',
ShapedLiteral[float, Literal[""]],
type_params=()
)

# torch

try:
Expand All @@ -167,6 +176,7 @@ def __class_getitem__(cls, key):
)
except ImportError:
pass


else:
ShapedLiteral = ShapedTensor
Expand Down
Loading