pytorch/caffe2/python/layers/simple_operator_layers.py
Andrey Malevich 8047b8dc83 Fix random issues with some of the layers getting missing from registry.
Summary:
It looks like that for types created directly through a type(...)
function call, we don't store strong references anywhere. As a result,
a GC call in Python may or may not clean up these classes depending on the
phase of the moon and other random things. This means that in some
cases simple layers such as Relu might disappear.

cat_shame

Reviewed By: xianjiec

Differential Revision: D4396289

fbshipit-source-id: ba4e9b7ef54ee43349853b0acc3d3f40c74e4d73
2017-01-10 15:14:31 -08:00

75 lines
2.5 KiB
Python

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import schema
from caffe2.python.layers.layers import (
ModelLayer,
)
def simple_init(self, model, input_record, *args, **kwargs):
    """Generic __init__ for generated layer classes.

    The output schema mirrors the input record exactly (same field layout,
    fresh blobs). `self.operator` is a class attribute installed by the
    type(...) factory below.

    Fix: validate `self.operator` *before* it is handed to
    ModelLayer.__init__, so an invalid layer fails on the guard rather
    than deep inside the base-class constructor.
    """
    assert self.operator is not None, "Try to create invalid operator layer"
    ModelLayer.__init__(self, model, self.operator, input_record, **kwargs)
    # Positional args are stashed for simple_add_ops to forward to the op.
    self.args = args
    self.output_schema = schema.NewRecord(self.model.net, input_record)
def first_field_schema_init(self, model, input_record, *args, **kwargs):
    """__init__ for generated layers (e.g. Add, Sum) that take a
    schema.Struct input and whose output schema matches only the first
    field of that Struct.

    Fix: run both validity checks *before* ModelLayer.__init__ consumes
    `self.operator` and `input_record`, so a bad layer fails on the
    explicit guard rather than inside the base-class constructor.
    """
    assert self.operator is not None, "Try to create invalid operator layer"
    assert isinstance(input_record, schema.Struct),\
        "Operator {0} expects schema.Struct as input, received {1} instead".\
        format(self.operator, input_record)
    ModelLayer.__init__(self, model, self.operator, input_record, **kwargs)
    # Positional args are stashed for simple_add_ops to forward to the op.
    self.args = args
    self.output_schema = schema.NewRecord(self.model.net, input_record[0])
def simple_add_ops(self, net):
    """Emit this layer's operator into `net`.

    Looks up the operator-builder method named by `self.operator` on the
    net and invokes it with the layer's input/output blobs plus whatever
    extra args/kwargs were captured at construction time.
    """
    op_builder = getattr(net, self.operator)
    op_builder(
        self.input_record.field_blobs(),
        self.output_schema.field_blobs(),
        *self.args,
        **self.kwargs
    )
_simple_operators = ['Softmax', 'Relu', 'Sigmoid', 'Tanh']
_first_field_schema_operators = ['Add', 'Sum']

# We need to store refs for all created types, to make sure that they won't be
# GCed before we actually register them.
_known_layers = []


def _create_layer_class(operator, init_fn):
    """Build a ModelLayer subclass named after `operator`, wiring `init_fn`
    as __init__ and simple_add_ops as add_ops, and keep a strong reference
    in _known_layers so the class cannot be garbage-collected.

    Per the original comments, subclassing ModelLayer also registers the
    class in the layer registry automatically.
    """
    _known_layers.append(
        type(
            str(operator),
            (ModelLayer,),
            {'__init__': init_fn,
             'add_ops': simple_add_ops,
             'operator': operator
             }
        )
    )


# Simple elementwise-style operators: output schema mirrors the input.
for operator in _simple_operators:
    _create_layer_class(operator, simple_init)

# Operators over a Struct input whose output matches the first field only.
for operator in _first_field_schema_operators:
    _create_layer_class(operator, first_field_schema_init)