Showing 3 changed files with 168 additions and 84 deletions.
@@ -1,50 +1,126 @@
# pylint: disable=missing-function-docstring

from cirkit.new.layers import CategoricalLayer, CPLayer, TuckerLayer
from typing import Dict

from cirkit.new.layers import CategoricalLayer, DenseLayer, HadamardLayer, TuckerLayer
from cirkit.new.reparams import ExpReparam
from cirkit.new.symbolic import SymbolicInputLayer, SymbolicProductLayer, SymbolicSumLayer
from tests.new.symbolic.test_utils import get_simple_rg

# TODO: avoid repetition?

def test_symbolic_sum_layer() -> None:
    scope = {0, 1}
    num_units = 3
    layer = SymbolicSumLayer(scope, num_units, TuckerLayer, reparam=ExpReparam())
    assert "SymbolicSumLayer" in repr(layer)
    assert "Scope: Scope({0, 1})" in repr(layer)
    assert "Layer Class: TuckerLayer" in repr(layer)
    assert "Number of Units: 3" in repr(layer)

def test_symbolic_layers_sum_and_prod() -> None:
    rg = get_simple_rg()
    input_node0, input_node1 = rg.input_nodes
    (partition_node,) = rg.partition_nodes
    (region_node,) = rg.inner_region_nodes

def test_symbolic_sum_layer_cp() -> None:
    scope = {0, 1}
    num_units = 3
    layer_kwargs = {"collapsed": False, "shared": False, "arity": 2}
    layer = SymbolicSumLayer(scope, num_units, CPLayer, layer_kwargs, reparam=ExpReparam())
    assert "SymbolicSumLayer" in repr(layer)
    assert "Scope: Scope({0, 1})" in repr(layer)
    assert "Layer Class: CPLayer" in repr(layer)
    assert "Number of Units: 3" in repr(layer)


def test_symbolic_product_node() -> None:
    scope = {0, 1}
    num_input_units = 2
    layer = SymbolicProductLayer(scope, num_input_units, TuckerLayer)
    assert "SymbolicProductLayer" in repr(layer)
    assert "Scope: Scope({0, 1})" in repr(layer)
    assert "Layer Class: TuckerLayer" in repr(layer)
    assert "Number of Units: 2" in repr(layer)


def test_symbolic_input_node() -> None:
    scope = {0, 1}
    input_kwargs = {"num_categories": 5}
    sum_kwargs: Dict[str, None] = {}  # Avoid Any.
    reparam = ExpReparam()

    input_layer0 = SymbolicInputLayer(
        input_node0,
        (),
        num_units=num_units,
        layer_cls=CategoricalLayer,
        layer_kwargs=input_kwargs,
        reparam=reparam,
    )
    assert "SymbolicInputLayer" in repr(input_layer0)
    assert "Scope: Scope({0})" in repr(input_layer0)
    assert "Input Exp Family Class: CategoricalLayer" in repr(input_layer0)
    assert "Layer KWArgs: {'num_categories': 5}" in repr(input_layer0)
    assert "Number of Units: 3" in repr(input_layer0)
    input_layer1 = SymbolicInputLayer(
        input_node1,
        (),
        num_units=num_units,
        layer_cls=CategoricalLayer,
        layer_kwargs=input_kwargs,
        reparam=reparam,
    )

    prod_layer = SymbolicProductLayer(
        partition_node,
        (input_layer0, input_layer1),
        num_units=num_units,
        layer_cls=HadamardLayer,
    )
    assert "SymbolicProductLayer" in repr(prod_layer)
    assert "Scope: Scope({0, 1})" in repr(prod_layer)
    assert "Layer Class: HadamardLayer" in repr(prod_layer)
    assert "Number of Units: 3" in repr(prod_layer)

    sum_layer = SymbolicSumLayer(
        region_node,
        (prod_layer,),
        num_units=num_units,
        layer_cls=DenseLayer,
        layer_kwargs=sum_kwargs,
        reparam=reparam,
    )
    assert "SymbolicSumLayer" in repr(sum_layer)
    assert "Scope: Scope({0, 1})" in repr(sum_layer)
    assert "Layer Class: DenseLayer" in repr(sum_layer)
    assert "Number of Units: 3" in repr(sum_layer)


def test_symbolic_layers_sum_prod() -> None:
    rg = get_simple_rg()
    input_node0, input_node1 = rg.input_nodes
    (partition_node,) = rg.partition_nodes
    (region_node,) = rg.inner_region_nodes

    num_units = 3
    input_kwargs = {"num_categories": 5}
    layer = SymbolicInputLayer(
        scope, num_units, CategoricalLayer, input_kwargs, reparam=ExpReparam()
    )
    assert "SymbolicInputLayer" in repr(layer)
    assert "Scope: Scope({0, 1})" in repr(layer)
    assert "Input Exp Family Class: CategoricalLayer" in repr(layer)
    assert "Layer KWArgs: {'num_categories': 5}" in repr(layer)
    assert "Number of Units: 3" in repr(layer)
    sum_kwargs: Dict[str, None] = {}  # Avoid Any.
    reparam = ExpReparam()

    input_layer0 = SymbolicInputLayer(
        input_node0,
        (),
        num_units=num_units,
        layer_cls=CategoricalLayer,
        layer_kwargs=input_kwargs,
        reparam=reparam,
    )
    assert "SymbolicInputLayer" in repr(input_layer0)
    assert "Scope: Scope({0})" in repr(input_layer0)
    assert "Input Exp Family Class: CategoricalLayer" in repr(input_layer0)
    assert "Layer KWArgs: {'num_categories': 5}" in repr(input_layer0)
    assert "Number of Units: 3" in repr(input_layer0)
    input_layer1 = SymbolicInputLayer(
        input_node1,
        (),
        num_units=num_units,
        layer_cls=CategoricalLayer,
        layer_kwargs=input_kwargs,
        reparam=reparam,
    )

    prod_layer = SymbolicProductLayer(
        partition_node,
        (input_layer0, input_layer1),
        num_units=num_units**2,
        layer_cls=TuckerLayer,
    )
    assert "SymbolicProductLayer" in repr(prod_layer)
    assert "Scope: Scope({0, 1})" in repr(prod_layer)
    assert "Layer Class: TuckerLayer" in repr(prod_layer)
    assert "Number of Units: 9" in repr(prod_layer)

    sum_layer = SymbolicSumLayer(
        region_node,
        (prod_layer,),
        num_units=num_units,
        layer_cls=TuckerLayer,
        layer_kwargs=sum_kwargs,
        reparam=reparam,
    )
    assert "SymbolicSumLayer" in repr(sum_layer)
    assert "Scope: Scope({0, 1})" in repr(sum_layer)
    assert "Layer Class: TuckerLayer" in repr(sum_layer)
    assert "Number of Units: 3" in repr(sum_layer)
@@ -0,0 +1,39 @@
# pylint: disable=missing-function-docstring,missing-return-doc
from typing import Dict

from cirkit.new.layers import CategoricalLayer, CPLayer
from cirkit.new.region_graph import RegionGraph, RegionNode
from cirkit.new.reparams import ExpReparam
from cirkit.new.symbolic import SymbolicCircuit


def get_simple_rg() -> RegionGraph:
    rg = RegionGraph()
    node1 = RegionNode({0})
    node2 = RegionNode({1})
    region = RegionNode({0, 1})
    rg.add_partitioning(region, (node1, node2))
    return rg.freeze()


def get_symbolic_circuit_on_rg(rg: RegionGraph) -> SymbolicCircuit:
    num_units = 4
    input_cls = CategoricalLayer
    input_kwargs = {"num_categories": 256}
    inner_cls = CPLayer
    inner_kwargs: Dict[str, None] = {}  # Avoid Any.
    reparam = ExpReparam()

    return SymbolicCircuit(
        rg,
        num_input_units=num_units,
        num_sum_units=num_units,
        input_layer_cls=input_cls,
        input_layer_kwargs=input_kwargs,
        input_reparam=reparam,
        sum_layer_cls=inner_cls,
        sum_layer_kwargs=inner_kwargs,
        sum_reparam=reparam,
        prod_layer_cls=inner_cls,
        prod_layer_kwargs=None,
    )
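As a usage note, the two helpers are meant to compose: `get_simple_rg` produces the frozen two-variable region graph, and `get_symbolic_circuit_on_rg` builds a symbolic circuit over it. A minimal sketch, assuming the tests package is importable from the repository root:

```python
from tests.new.symbolic.test_utils import get_simple_rg, get_symbolic_circuit_on_rg

# Frozen region graph over variables {0, 1} with a single partitioning.
rg = get_simple_rg()

# Symbolic circuit over that graph: Categorical inputs with 256 categories,
# CPLayer sum/product layers, a shared ExpReparam for input and sum layers,
# and 4 units per layer, as configured inside get_symbolic_circuit_on_rg.
circuit = get_symbolic_circuit_on_rg(rg)
print(type(circuit).__name__)  # SymbolicCircuit
```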