jactorch.quickstart.inference

Classes

Class AsyncInferenceTask

class AsyncInferenceTask

Bases: object

__init__(feed_dict, future=None)
__new__(**kwargs)
get_result()
put_result(result)
feed_dict
future
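
Judging from the signatures above, an AsyncInferenceTask bundles a feed_dict with an optional future, and put_result()/get_result() hand the computed output between a worker and the caller. The sketch below only illustrates that implied handshake; whether user code constructs these tasks directly or the inferencer creates them internally is not stated in this listing, and the feed_dict and result contents shown are hypothetical.

import torch

from jactorch.quickstart.inference import AsyncInferenceTask

# Bundle a (hypothetical) feed_dict into a task; no future is attached here.
task = AsyncInferenceTask({'input': torch.randn(1, 4)})

# A worker would presumably attach the computed output ...
task.put_result({'output': torch.zeros(1, 2)})

# ... which the submitting side then retrieves.
result = task.get_result()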

Class AsyncModelInferencer

class AsyncModelInferencer

Bases: ModelInferencer

__init__(model, nr_workers=1)
__new__(**kwargs)
activate()
finalize()
inference(feed_dict, future=None)
initialize()
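
A lifecycle sketch for AsyncModelInferencer, using only the signatures above. It is assumed that initialize()/activate() start the nr_workers worker(s), that inference() enqueues a request without blocking, and that finalize() shuts the workers down; the expected type of the optional future argument, through which results are presumably delivered, is not documented in this listing.

import torch
import torch.nn as nn

from jactorch.quickstart.inference import AsyncModelInferencer

model = nn.Linear(4, 2)                      # placeholder model
inferencer = AsyncModelInferencer(model, nr_workers=2)

inferencer.initialize()                      # assumed: set up the model and workers
inferencer.activate()                        # assumed: start serving requests

# Submit a request; the result is presumably delivered via the optional
# `future` argument (its expected type is not documented here).
inferencer.inference({'input': torch.randn(1, 4)})

inferencer.finalize()                        # assumed: stop the workers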

Class BatchedAsyncModelInferencer

class BatchedAsyncModelInferencer

Bases: AsyncModelInferencer

__init__(model, nr_workers=1, batch_size=8, latency=10)
__new__(**kwargs)
activate()
finalize()
inference(feed_dict, future=None)
initialize()
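
BatchedAsyncModelInferencer adds batch_size and latency on top of AsyncModelInferencer. The reading below, that requests are buffered into batches of up to batch_size and flushed after at most latency time units, is inferred from the parameter names only; the time unit and the exact flushing policy are not documented in this listing.

import torch.nn as nn

from jactorch.quickstart.inference import BatchedAsyncModelInferencer

model = nn.Linear(4, 2)                      # placeholder model
inferencer = BatchedAsyncModelInferencer(
    model,
    nr_workers=1,
    batch_size=16,   # assumed: maximum number of requests merged into one forward pass
    latency=10,      # assumed: maximum wait before a partial batch is flushed (unit undocumented)
)

inferencer.initialize()
inferencer.activate()
# Requests are then submitted with inference(feed_dict, future=None),
# exactly as with AsyncModelInferencer.
inferencer.finalize()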

Class ModelInferencer

class ModelInferencer

Bases: object

__init__(model)
__new__(**kwargs)
activate()
finalize()
inference(feed_dict)
initialize()
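
The sketch below illustrates the synchronous base class, ModelInferencer, using only the signatures listed above. The placeholder model, the feed_dict keys, and the call order initialize() → inference() → finalize() are assumptions for illustration; the exact contract of initialize()/activate()/finalize() is not documented in this listing.

import torch
import torch.nn as nn

from jactorch.quickstart.inference import ModelInferencer

model = nn.Linear(4, 2)                      # placeholder model; any torch.nn.Module is assumed to work
inferencer = ModelInferencer(model)

inferencer.initialize()                      # assumed: prepare the model for inference
feed_dict = {'input': torch.randn(1, 4)}     # hypothetical feed_dict layout
output = inferencer.inference(feed_dict)     # presumably returns the model output for feed_dict
inferencer.finalize()                        # assumed: release any resources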