Models: [object detection] TypeError: can't pickle dict_values objects

Created on 16 Jul 2018 · 29 comments · Source: tensorflow/models

creating index...
index created!
2018-07-16 22:12:38.626883: W T:\src\github\tensorflow\tensorflow\core\framework\op_kernel.cc:1306] Invalid argument: TypeError: can't pickle dict_values objects
Traceback (most recent call last):

File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\ops\scrip
t_ops.py", line 158, in __call__
ret = func(*args)

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 339, in first_value_func
self._metrics = self.evaluate()

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 193, in evaluate
self._detection_boxes_list)

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_tools.py", line 118, in LoadAnnotations
results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

File "d:\Program Files\Anaconda3\lib\copy.py", line 174, in deepcopy
rv = reductor(4)

TypeError: can't pickle dict_values objects

Traceback (most recent call last):
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\client\se
ssion.py", line 1322, in _do_call
return fn(*args)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\client\se
ssion.py", line 1307, in _run_fn
options, feed_dict, fetch_list, target_list, run_metadata)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\client\se
ssion.py", line 1409, in _call_tf_sessionrun
run_metadata)
tensorflow.python.framework.errors_impl.InvalidArgumentError: TypeError: can't p
ickle dict_values objects
Traceback (most recent call last):

File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\ops\scrip
t_ops.py", line 158, in __call__
ret = func(*args)

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 339, in first_value_func
self._metrics = self.evaluate()

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 193, in evaluate
self._detection_boxes_list)

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_tools.py", line 118, in LoadAnnotations
results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

File "d:\Program Files\Anaconda3\lib\copy.py", line 174, in deepcopy
rv = reductor(4)

TypeError: can't pickle dict_values objects

     [[Node: PyFunc_1 = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_3", _d

evice="/job:localhost/replica:0/task:0/device:CPU:0"]()]]

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File "object_detection\model_main.py", line 101, in
tf.app.run()
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\platform\
app.py", line 125, in run
_sys.exit(main(argv))
File "object_detection\model_main.py", line 97, in main
tf.estimator.train_and_evaluate(estimator, train_spec, eval_specs[0])
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\training.py", line 447, in train_and_evaluate
return executor.run()
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\training.py", line 531, in run
return self.run_local()
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\training.py", line 681, in run_local
eval_result, export_results = evaluator.evaluate_and_export()
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\training.py", line 886, in evaluate_and_export
hooks=self._eval_spec.hooks)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\estimator.py", line 460, in evaluate
output_dir=self.eval_dir(name))
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\estimator.py", line 1386, in _evaluate_run
config=self._session_config)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\training\
evaluation.py", line 212, in _evaluate_once
session.run(eval_ops, feed_dict)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\training\
monitored_session.py", line 689, in __exit__
self._close_internal(exception_type)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\training\
monitored_session.py", line 721, in _close_internal
h.end(self._coordinated_creator.tf_sess)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\training\
basic_session_run_hooks.py", line 824, in end
self._final_ops, feed_dict=self._final_ops_feed_dict)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\client\se
ssion.py", line 900, in run
run_metadata_ptr)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\client\se
ssion.py", line 1135, in _run
feed_dict_tensor, options, run_metadata)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\client\se
ssion.py", line 1316, in _do_run
run_metadata)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\client\se
ssion.py", line 1335, in _do_call
raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.InvalidArgumentError: TypeError: can't p
ickle dict_values objects
Traceback (most recent call last):

File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\ops\scrip
t_ops.py", line 158, in __call__
ret = func(*args)

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 339, in first_value_func
self._metrics = self.evaluate()

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 193, in evaluate
self._detection_boxes_list)

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_tools.py", line 118, in LoadAnnotations
results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

File "d:\Program Files\Anaconda3\lib\copy.py", line 174, in deepcopy
rv = reductor(4)

TypeError: can't pickle dict_values objects

     [[Node: PyFunc_1 = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_3", _d

evice="/job:localhost/replica:0/task:0/device:CPU:0"]()]]

Caused by op 'PyFunc_1', defined at:
File "object_detection\model_main.py", line 101, in
tf.app.run()
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\platform\
app.py", line 125, in run
_sys.exit(main(argv))
File "object_detection\model_main.py", line 97, in main
tf.estimator.train_and_evaluate(estimator, train_spec, eval_specs[0])
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\training.py", line 447, in train_and_evaluate
return executor.run()
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\training.py", line 531, in run
return self.run_local()
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\training.py", line 681, in run_local
eval_result, export_results = evaluator.evaluate_and_export()
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\training.py", line 886, in evaluate_and_export
hooks=self._eval_spec.hooks)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\estimator.py", line 453, in evaluate
input_fn, hooks, checkpoint_path)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\estimator.py", line 1348, in _evaluate_build_graph
features, labels, model_fn_lib.ModeKeys.EVAL, self.config)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\estimator
\estimator.py", line 1107, in _call_model_fn
model_fn_results = self._model_fn(features=features, **kwargs)
File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\model_lib.py", line 383, in model_fn
include_metrics_per_category=eval_config.include_metrics_per_category)
File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\eval_util.py", line 629, in get_eval_metric_ops_for_evaluators
input_data_fields.groundtruth_is_crowd)))
File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 349, in get_estimator_eval_me
tric_ops
first_value_op = tf.py_func(first_value_func, [], tf.float32)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\ops\scrip
t_ops.py", line 384, in py_func
func=func, inp=inp, Tout=Tout, stateful=stateful, eager=False, name=name)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\ops\scrip
t_ops.py", line 227, in _internal_py_func
input=inp, token=token, Tout=Tout, name=name)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\ops\gen_s
cript_ops.py", line 130, in py_func
"PyFunc", input=input, token=token, Tout=Tout, name=name)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\framework
\op_def_library.py", line 787, in _apply_op_helper
op_def=op_def)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\framework
\ops.py", line 3414, in create_op
op_def=op_def)
File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\framework
\ops.py", line 1740, in __init__
self._traceback = self._graph._extract_stack() # pylint: disable=protected-
access

InvalidArgumentError (see above for traceback): TypeError: can't pickle dict_val
ues objects
Traceback (most recent call last):

File "d:\Program Files\Anaconda3\lib\site-packages\tensorflow\python\ops\scrip
t_ops.py", line 158, in __call__
ret = func(*args)

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 339, in first_value_func
self._metrics = self.evaluate()

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_evaluation.py", line 193, in evaluate
self._detection_boxes_list)

File "D:\Program Files\Anaconda3\Lib\site-packages\tensorflow\models\research\
object_detection\metrics\coco_tools.py", line 118, in LoadAnnotations
results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

File "d:\Program Files\Anaconda3\lib\copy.py", line 174, in deepcopy
rv = reductor(4)

TypeError: can't pickle dict_values objects

     [[Node: PyFunc_1 = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_3", _d

evice="/job:localhost/replica:0/task:0/device:CPU:0"]()]]

Most helpful comment

If you're using Python 3, wrap category_index.values() in list() in model_lib.py (around line 381), i.e. list(category_index.values()).
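
For reference, a minimal sketch of that change. The exact line number and surrounding call vary between versions of model_lib.py (comments in this thread place it at lines 381, 390, 411, 414 or 418), and the variable name below is illustrative:

```python
# object_detection/model_lib.py, inside model_fn -- on Python 3,
# category_index.values() is a dict_values view that deepcopy/pickle
# cannot handle, so wrap it in list() before passing it on.

# before (fails on Python 3):
#   eval_util.get_eval_metric_ops_for_evaluators(
#       eval_config, category_index.values(), eval_dict)

# after:
eval_metric_ops = eval_util.get_eval_metric_ops_for_evaluators(  # variable name illustrative
    eval_config, list(category_index.values()), eval_dict)
```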

All 29 comments

Were you using Python 2 or 3?

I am seeing the same issue with Python 3.6.

I'm not 100% sure, but could you try with Python 2?

If you're using Python 3, wrap category_index.values() in list() in model_lib.py (around line 381), i.e. list(category_index.values()).

Thank you very much for your help; my problem is solved. I was using Python 3.5.

I had the same problem with Python 3.6.5. I then created a Python 2.7.15 virtualenv and ran through the TF object detection installation requirements, and it ran perfectly on a reasonably sized dataset (nearly 10,000 images). So clearly, this and the half dozen other related issues I found (which others have already submitted) all stem from incompatibilities with Python 3.
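
The Python 2 vs. Python 3 difference is the root cause: in Python 2, dict.values() returns a plain list, while in Python 3 it returns a dict_values view, and copy.deepcopy falls back on the pickle protocol, which cannot handle such views. A minimal illustration, independent of TensorFlow:

```python
import copy

category_index = {1: {'id': 1, 'name': 'cat'}, 2: {'id': 2, 'name': 'dog'}}
values = category_index.values()   # Python 3: a dict_values view, not a list

try:
    copy.deepcopy(values)          # raises TypeError: can't pickle dict_values objects
except TypeError as err:
    print(err)

copy.deepcopy(list(values))        # fine: a plain list is deep-copyable
```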

Same problem here, solved with @MaeThird's fix. Thanks!
I think support for Python 3 should be added.

Got the same issue and solved it with @MaeThird's fix. Thanks!

Solved with @MaeThird's fix. model_lib.py is in models/research/object_detection/ and category_index.values() was at line 390 for me.

@MaeThird Thanks.

Got the same error and tried @MaeThird's fix, but I still get the same error "TypeError: can't pickle dict_values objects". Any idea how to solve this?

Hi everyone,
I am getting the same error on Python 3.7 and tried @MaeThird's fix, but no luck. Kindly help me fix this issue.

@auth.required_features(features=["fval1"])
def get(self, customer):
   pass

    @auth.required_features(features=["fval1"])
  File "/usr/local/lib/python3.7/site-packages/flask_restplus/namespace.py", line 119, in wrapper
    self._handle_api_doc(documented, kwargs if show else False)
  File "/usr/local/lib/python3.7/site-packages/flask_restplus/namespace.py", line 110, in _handle_api_doc
    cls.__apidoc__ = merge(getattr(cls, '__apidoc__', {}), doc)
  File "/usr/local/lib/python3.7/site-packages/flask_restplus/utils.py", line 39, in merge
    result[key] = deepcopy(value)
  File "/usr/local/lib/python3.7/copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "/usr/local/lib/python3.7/copy.py", line 215, in _deepcopy_list
    append(deepcopy(a, memo))
  File "/usr/local/lib/python3.7/copy.py", line 180, in deepcopy
    y = _reconstruct(x, memo, *rv)
  File "/usr/local/lib/python3.7/copy.py", line 280, in _reconstruct
    state = deepcopy(state, memo)
  File "/usr/local/lib/python3.7/copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "/usr/local/lib/python3.7/copy.py", line 240, in _deepcopy_dict
    y[deepcopy(key, memo)] = deepcopy(value, memo)
  File "/usr/local/lib/python3.7/copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "/usr/local/lib/python3.7/copy.py", line 215, in _deepcopy_list
    append(deepcopy(a, memo))
  File "/usr/local/lib/python3.7/copy.py", line 180, in deepcopy
    y = _reconstruct(x, memo, *rv)
  File "/usr/local/lib/python3.7/copy.py", line 280, in _reconstruct
    state = deepcopy(state, memo)
  File "/usr/local/lib/python3.7/copy.py", line 150, in deepcopy
    y = copier(x, memo)
  File "/usr/local/lib/python3.7/copy.py", line 240, in _deepcopy_dict
    y[deepcopy(key, memo)] = deepcopy(value, memo)
  File "/usr/local/lib/python3.7/copy.py", line 169, in deepcopy
    rv = reductor(4)
TypeError: can't pickle dict_values objects

I tried the fix as well. For me it is still producing the same error. I am using Python 3.6

creating index...
index created!
2018-09-10 07:48:14.389071: W tensorflow/core/framework/op_kernel.cc:1263] Invalid argument: TypeError: can't pickle dict_values objects
Traceback (most recent call last):

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/ops/script_ops.py", line 206, in __call__
    ret = func(*args)

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 332, in first_value_func
    self._metrics = self.evaluate()

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 193, in evaluate
    self._detection_boxes_list)

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_tools.py", line 118, in LoadAnnotations
    results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/copy.py", line 169, in deepcopy
    rv = reductor(4)

TypeError: can't pickle dict_values objects


Traceback (most recent call last):
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1278, in _do_call
    return fn(*args)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1263, in _run_fn
    options, feed_dict, fetch_list, target_list, run_metadata)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1350, in _call_tf_sessionrun
    run_metadata)
tensorflow.python.framework.errors_impl.InvalidArgumentError: TypeError: can't pickle dict_values objects
Traceback (most recent call last):

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/ops/script_ops.py", line 206, in __call__
    ret = func(*args)

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 332, in first_value_func
    self._metrics = self.evaluate()

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 193, in evaluate
    self._detection_boxes_list)

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_tools.py", line 118, in LoadAnnotations
    results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/copy.py", line 169, in deepcopy
    rv = reductor(4)

TypeError: can't pickle dict_values objects


     [[Node: PyFunc_1 = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_7", _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "object_detection/model_main.py", line 103, in <module>
    tf.app.run()
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/platform/app.py", line 125, in run
    _sys.exit(main(argv))
  File "object_detection/model_main.py", line 99, in main
    tf.estimator.train_and_evaluate(estimator, train_spec, eval_specs[0])
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 451, in train_and_evaluate
    return executor.run()
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 590, in run
    return self.run_local()
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 691, in run_local
    saving_listeners=saving_listeners)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 376, in train
    loss = self._train_model(input_fn, hooks, saving_listeners)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1145, in _train_model
    return self._train_model_default(input_fn, hooks, saving_listeners)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1173, in _train_model_default
    saving_listeners)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1451, in _train_with_estimator_spec
    _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/monitored_session.py", line 695, in __exit__
    self._close_internal(exception_type)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/monitored_session.py", line 727, in _close_internal
    h.end(self._coordinated_creator.tf_sess)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/basic_session_run_hooks.py", line 470, in end
    self._save(session, last_step)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/basic_session_run_hooks.py", line 489, in _save
    if l.after_save(session, step):
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 497, in after_save
    self._evaluate(global_step_value)  # updates self.eval_result
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 517, in _evaluate
    self._evaluator.evaluate_and_export())
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 884, in evaluate_and_export
    hooks=self._eval_spec.hooks)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 470, in evaluate
    output_dir=self.eval_dir(name))
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1501, in _evaluate_run
    config=self._session_config)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/evaluation.py", line 212, in _evaluate_once
    session.run(eval_ops, feed_dict)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/monitored_session.py", line 695, in __exit__
    self._close_internal(exception_type)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/monitored_session.py", line 727, in _close_internal
    h.end(self._coordinated_creator.tf_sess)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/basic_session_run_hooks.py", line 824, in end
    self._final_ops, feed_dict=self._final_ops_feed_dict)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 877, in run
    run_metadata_ptr)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1100, in _run
    feed_dict_tensor, options, run_metadata)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1272, in _do_run
    run_metadata)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1291, in _do_call
    raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.InvalidArgumentError: TypeError: can't pickle dict_values objects
Traceback (most recent call last):

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/ops/script_ops.py", line 206, in __call__
    ret = func(*args)

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 332, in first_value_func
    self._metrics = self.evaluate()

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 193, in evaluate
    self._detection_boxes_list)

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_tools.py", line 118, in LoadAnnotations
    results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/copy.py", line 169, in deepcopy
    rv = reductor(4)

TypeError: can't pickle dict_values objects


     [[Node: PyFunc_1 = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_7", _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]

Caused by op 'PyFunc_1', defined at:
  File "object_detection/model_main.py", line 103, in <module>
    tf.app.run()
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/platform/app.py", line 125, in run
    _sys.exit(main(argv))
  File "object_detection/model_main.py", line 99, in main
    tf.estimator.train_and_evaluate(estimator, train_spec, eval_specs[0])
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 451, in train_and_evaluate
    return executor.run()
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 590, in run
    return self.run_local()
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 691, in run_local
    saving_listeners=saving_listeners)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 376, in train
    loss = self._train_model(input_fn, hooks, saving_listeners)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1145, in _train_model
    return self._train_model_default(input_fn, hooks, saving_listeners)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1173, in _train_model_default
    saving_listeners)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1451, in _train_with_estimator_spec
    _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/monitored_session.py", line 695, in __exit__
    self._close_internal(exception_type)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/monitored_session.py", line 727, in _close_internal
    h.end(self._coordinated_creator.tf_sess)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/basic_session_run_hooks.py", line 470, in end
    self._save(session, last_step)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/training/basic_session_run_hooks.py", line 489, in _save
    if l.after_save(session, step):
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 497, in after_save
    self._evaluate(global_step_value)  # updates self.eval_result
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 517, in _evaluate
    self._evaluator.evaluate_and_export())
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/training.py", line 884, in evaluate_and_export
    hooks=self._eval_spec.hooks)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 463, in evaluate
    input_fn, hooks, checkpoint_path)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1463, in _evaluate_build_graph
    features, labels, model_fn_lib.ModeKeys.EVAL, self.config)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/estimator/estimator.py", line 1133, in _call_model_fn
    model_fn_results = self._model_fn(features=features, **kwargs)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/model_lib.py", line 391, in model_fn
    eval_dict)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/eval_util.py", line 656, in get_eval_metric_ops_for_evaluators
    eval_dict))
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 342, in get_estimator_eval_metric_ops
    first_value_op = tf.py_func(first_value_func, [], tf.float32)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/ops/script_ops.py", line 456, in py_func
    func=func, inp=inp, Tout=Tout, stateful=stateful, eager=False, name=name)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/ops/script_ops.py", line 281, in _internal_py_func
    input=inp, token=token, Tout=Tout, name=name)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/ops/gen_script_ops.py", line 128, in py_func
    "PyFunc", input=input, token=token, Tout=Tout, name=name)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/framework/op_def_library.py", line 787, in _apply_op_helper
    op_def=op_def)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/util/deprecation.py", line 454, in new_func
    return func(*args, **kwargs)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 3155, in create_op
    op_def=op_def)
  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 1717, in __init__
    self._traceback = tf_stack.extract_stack()

InvalidArgumentError (see above for traceback): TypeError: can't pickle dict_values objects
Traceback (most recent call last):

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/tensorflow/python/ops/script_ops.py", line 206, in __call__
    ret = func(*args)

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 332, in first_value_func
    self._metrics = self.evaluate()

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_evaluation.py", line 193, in evaluate
    self._detection_boxes_list)

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/metrics/coco_tools.py", line 118, in LoadAnnotations
    results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

  File "/home/vasa/dev/tensorflow/venv/lib/python3.6/copy.py", line 169, in deepcopy
    rv = reductor(4)

TypeError: can't pickle dict_values objects


     [[Node: PyFunc_1 = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_7", _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]


I can confirm what @RocketRider said. The fix does not work; I got exactly the same error as he received.

Closing this since there is a clear answer already. We will also fix the Python 3 compatibility issue in future releases. Thanks!

@RocketRider looks like you're loading object_detection from site-packages; does changing the line in /home/vasa/dev/tensorflow/venv/lib/python3.6/site-packages/object_detection-0.1-py3.6.egg/object_detection/model_lib.py fix the issue?

"If you're using Python 3, wrap category_index.values() in list() in model_lib.py (around line 414, in the call to eval_util.get_eval_metric_ops_for_evaluators), i.e. list(category_index.values())."
Thanks @MaeThird. I am using Python 3.6 and also got the same error "TypeError: can't pickle dict_values objects".

InvalidArgumentError (see above for traceback): TypeError: can't pickle dict_values objects
Traceback (most recent call last):

File "/home/dptandroid/.local/lib/python3.5/site-packages/tensorflow/python/ops/script_ops.py", line 158, in __call__
ret = func(*args)

File "/home/dptandroid/zhanjie/20181018/models/research/object_detection/metrics/coco_evaluation.py", line 346, in first_value_func
self._metrics = self.evaluate()

File "/home/dptandroid/zhanjie/20181018/models/research/object_detection/metrics/coco_evaluation.py", line 207, in evaluate
self._detection_boxes_list)

File "/home/dptandroid/zhanjie/20181018/models/research/object_detection/metrics/coco_tools.py", line 118, in LoadAnnotations
results.dataset['categories'] = copy.deepcopy(self.dataset['categories'])

File "/usr/lib/python3.5/copy.py", line 174, in deepcopy
rv = reductor(4)

TypeError: can't pickle dict_values objects

I fixed the problem at line 118 of object_detection/metrics/coco_tools.py by using copy.deepcopy(len(self.dataset['categories'])).
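
Note that deep-copying len(self.dataset['categories']) stores an integer in results.dataset['categories'], which pycocotools later tries to iterate over (see the "'int' object is not iterable" traceback further down in this thread). If you patch coco_tools.py rather than model_lib.py, converting the view to a list keeps the annotation structure intact; a sketch, assuming line 118 matches the line quoted in the tracebacks above:

```python
# object_detection/metrics/coco_tools.py, in LoadAnnotations (around line 118):
# self.dataset['categories'] can be a dict_values view on Python 3, so convert
# it to a list before deep-copying instead of replacing it with its length.
results.dataset['categories'] = copy.deepcopy(list(self.dataset['categories']))
```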

This doesn't seem to fix the problem.

I found it at line 411.

@Glomels Ctrl+F, try it sometimes.

Thank you! It helped! I used it with Python 3.6. @MaeThird

My problem is solved by training with the train.py script, but I am unable to run model_main.py.
I am getting the following error:

tensorflow/core/framework/op_kernel.cc:1261] Invalid argument: TypeError: 'int' object is not iterable
Traceback (most recent call last):

File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/script_ops.py", line 206, in __call__
ret = func(*args)

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 358, in first_value_func
self._metrics = self.evaluate()

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 207, in evaluate
self._detection_boxes_list)

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_tools.py", line 134, in LoadAnnotations
results.createIndex()

File "/usr/local/lib/python3.4/dist-packages/pycocotools/coco.py", line 106, in createIndex
for cat in self.dataset['categories']:

TypeError: 'int' object is not iterable

Traceback (most recent call last):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1292, in _do_call
return fn(*args)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1277, in _run_fn
options, feed_dict, fetch_list, target_list, run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1367, in _call_tf_sessionrun
run_metadata)
tensorflow.python.framework.errors_impl.OutOfRangeError: End of sequence
[[{{node IteratorGetNext}} = IteratorGetNext[output_shapes=[[1], [1,300,300,3], [1,300,300,3], [1,2], [1,3], [1,100], [1,100,4], [1,100,1], [1,100,1], [1,100], [1,100], [1,100], [1]], output_types=[DT_INT32, DT_FLOAT, DT_UINT8, DT_INT32, DT_INT32, DT_FLOAT, DT_FLOAT, DT_FLOAT, DT_FLOAT, DT_INT32, DT_BOOL, DT_FLOAT, DT_INT32], _device="/job:localhost/replica:0/task:0/device:CPU:0"](IteratorV2)]]

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/evaluation.py", line 212, in _evaluate_once
session.run(eval_ops, feed_dict)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 671, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1148, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1239, in run
raise six.reraise(*original_exc_info)
File "/usr/local/lib/python3.4/dist-packages/six.py", line 693, in reraise
raise value
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1224, in run
return self._sess.run(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1296, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1076, in run
return self._sess.run(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 887, in run
run_metadata_ptr)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1110, in _run
feed_dict_tensor, options, run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1286, in _do_run
run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1308, in _do_call
raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.OutOfRangeError: End of sequence
[[{{node IteratorGetNext}} = IteratorGetNext[output_shapes=[[1], [1,300,300,3], [1,300,300,3], [1,2], [1,3], [1,100], [1,100,4], [1,100,1], [1,100,1], [1,100], [1,100], [1,100], [1]], output_types=[DT_INT32, DT_FLOAT, DT_UINT8, DT_INT32, DT_INT32, DT_FLOAT, DT_FLOAT, DT_FLOAT, DT_FLOAT, DT_INT32, DT_BOOL, DT_FLOAT, DT_INT32], _device="/job:localhost/replica:0/task:0/device:CPU:0"](IteratorV2)]]

Caused by op 'IteratorGetNext', defined at:
File "legacy/model_main.py", line 109, in
tf.app.run()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/platform/app.py", line 125, in run
_sys.exit(main(argv))
File "legacy/model_main.py", line 105, in main
tf.estimator.train_and_evaluate(estimator, train_spec, eval_specs[0])
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 471, in train_and_evaluate
return executor.run()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 610, in run
return self.run_local()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 711, in run_local
saving_listeners=saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 356, in train
loss = self._train_model(input_fn, hooks, saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1181, in _train_model
return self._train_model_default(input_fn, hooks, saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1215, in _train_model_default
saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1409, in _train_with_estimator_spec
_, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 671, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1148, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1224, in run
return self._sess.run(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1304, in run
run_metadata=run_metadata))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/basic_session_run_hooks.py", line 581, in after_run
if self._save(run_context.session, global_step):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/basic_session_run_hooks.py", line 606, in _save
if l.after_save(session, step):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 517, in after_save
self._evaluate(global_step_value) # updates self.eval_result
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 537, in _evaluate
self._evaluator.evaluate_and_export())
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 912, in evaluate_and_export
hooks=self._eval_spec.hooks)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 476, in evaluate
return _evaluate()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 462, in _evaluate
self._evaluate_build_graph(input_fn, hooks, checkpoint_path))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1422, in _evaluate_build_graph
self._call_model_fn_eval(input_fn, self.config))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1455, in _call_model_fn_eval
input_fn, model_fn_lib.ModeKeys.EVAL)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1049, in _get_features_and_labels_from_input_fn
self._call_input_fn(input_fn, mode))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/util.py", line 111, in parse_input_fn_result
result = iterator.get_next()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/data/ops/iterator_ops.py", line 420, in get_next
name=name)), self._output_types,
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/gen_dataset_ops.py", line 2069, in iterator_get_next
output_shapes=output_shapes, name=name)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/op_def_library.py", line 787, in _apply_op_helper
op_def=op_def)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/util/deprecation.py", line 488, in new_func
return func(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/ops.py", line 3272, in create_op
op_def=op_def)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/ops.py", line 1768, in __init__
self._traceback = tf_stack.extract_stack()

OutOfRangeError (see above for traceback): End of sequence
[[{{node IteratorGetNext}} = IteratorGetNext[output_shapes=[[1], [1,300,300,3], [1,300,300,3], [1,2], [1,3], [1,100], [1,100,4], [1,100,1], [1,100,1], [1,100], [1,100], [1,100], [1]], output_types=[DT_INT32, DT_FLOAT, DT_UINT8, DT_INT32, DT_INT32, DT_FLOAT, DT_FLOAT, DT_FLOAT, DT_FLOAT, DT_INT32, DT_BOOL, DT_FLOAT, DT_INT32], _device="/job:localhost/replica:0/task:0/device:CPU:0"](IteratorV2)]]

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1292, in _do_call
return fn(*args)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1277, in _run_fn
options, feed_dict, fetch_list, target_list, run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1367, in _call_tf_sessionrun
run_metadata)
tensorflow.python.framework.errors_impl.InvalidArgumentError: TypeError: 'int' object is not iterable
Traceback (most recent call last):

File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/script_ops.py", line 206, in __call__
ret = func(*args)

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 358, in first_value_func
self._metrics = self.evaluate()

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 207, in evaluate
self._detection_boxes_list)

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_tools.py", line 134, in LoadAnnotations
results.createIndex()

File "/usr/local/lib/python3.4/dist-packages/pycocotools/coco.py", line 106, in createIndex
for cat in self.dataset['categories']:

TypeError: 'int' object is not iterable

 [[{{node PyFunc_3}} = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_5", _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File "legacy/model_main.py", line 109, in
tf.app.run()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/platform/app.py", line 125, in run
_sys.exit(main(argv))
File "legacy/model_main.py", line 105, in main
tf.estimator.train_and_evaluate(estimator, train_spec, eval_specs[0])
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 471, in train_and_evaluate
return executor.run()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 610, in run
return self.run_local()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 711, in run_local
saving_listeners=saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 356, in train
loss = self._train_model(input_fn, hooks, saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1181, in _train_model
return self._train_model_default(input_fn, hooks, saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1215, in _train_model_default
saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1409, in _train_with_estimator_spec
_, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 671, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1148, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1239, in run
raise six.reraise(*original_exc_info)
File "/usr/local/lib/python3.4/dist-packages/six.py", line 693, in reraise
raise value
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1224, in run
return self._sess.run(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1304, in run
run_metadata=run_metadata))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/basic_session_run_hooks.py", line 581, in after_run
if self._save(run_context.session, global_step):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/basic_session_run_hooks.py", line 606, in _save
if l.after_save(session, step):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 517, in after_save
self._evaluate(global_step_value) # updates self.eval_result
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 537, in _evaluate
self._evaluator.evaluate_and_export())
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 912, in evaluate_and_export
hooks=self._eval_spec.hooks)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 476, in evaluate
return _evaluate()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 469, in _evaluate
output_dir=self.eval_dir(name))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1528, in _evaluate_run
config=self._session_config)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/evaluation.py", line 212, in _evaluate_once
session.run(eval_ops, feed_dict)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 783, in __exit__
self._close_internal(exception_type)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 816, in _close_internal
h.end(self._coordinated_creator.tf_sess)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/basic_session_run_hooks.py", line 941, in end
self._final_ops, feed_dict=self._final_ops_feed_dict)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 887, in run
run_metadata_ptr)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1110, in _run
feed_dict_tensor, options, run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1286, in _do_run
run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/client/session.py", line 1308, in _do_call
raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.InvalidArgumentError: TypeError: 'int' object is not iterable
Traceback (most recent call last):

File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/script_ops.py", line 206, in __call__
ret = func(*args)

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 358, in first_value_func
self._metrics = self.evaluate()

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 207, in evaluate
self._detection_boxes_list)

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_tools.py", line 134, in LoadAnnotations
results.createIndex()

File "/usr/local/lib/python3.4/dist-packages/pycocotools/coco.py", line 106, in createIndex
for cat in self.dataset['categories']:

TypeError: 'int' object is not iterable

 [[{{node PyFunc_3}} = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_5", _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]

Caused by op 'PyFunc_3', defined at:
File "legacy/model_main.py", line 109, in
tf.app.run()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/platform/app.py", line 125, in run
_sys.exit(main(argv))
File "legacy/model_main.py", line 105, in main
tf.estimator.train_and_evaluate(estimator, train_spec, eval_specs[0])
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 471, in train_and_evaluate
return executor.run()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 610, in run
return self.run_local()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 711, in run_local
saving_listeners=saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 356, in train
loss = self._train_model(input_fn, hooks, saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1181, in _train_model
return self._train_model_default(input_fn, hooks, saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1215, in _train_model_default
saving_listeners)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1409, in _train_with_estimator_spec
_, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 671, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1148, in run
run_metadata=run_metadata)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1224, in run
return self._sess.run(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/monitored_session.py", line 1304, in run
run_metadata=run_metadata))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/basic_session_run_hooks.py", line 581, in after_run
if self._save(run_context.session, global_step):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/training/basic_session_run_hooks.py", line 606, in _save
if l.after_save(session, step):
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 517, in after_save
self._evaluate(global_step_value) # updates self.eval_result
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 537, in _evaluate
self._evaluator.evaluate_and_export())
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/training.py", line 912, in evaluate_and_export
hooks=self._eval_spec.hooks)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 476, in evaluate
return _evaluate()
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 462, in _evaluate
self._evaluate_build_graph(input_fn, hooks, checkpoint_path))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1422, in _evaluate_build_graph
self._call_model_fn_eval(input_fn, self.config))
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1458, in _call_model_fn_eval
features, labels, model_fn_lib.ModeKeys.EVAL, config)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/estimator/estimator.py", line 1169, in _call_model_fn
model_fn_results = self._model_fn(features=features, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/model_lib.py", line 418, in model_fn
eval_config, list(category_index.values()), eval_dict)
File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/eval_util.py", line 848, in get_eval_metric_ops_for_evaluators
eval_dict))
File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 368, in get_estimator_eval_metric_ops
first_value_op = tf.py_func(first_value_func, [], tf.float32)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/script_ops.py", line 457, in py_func
func=func, inp=inp, Tout=Tout, stateful=stateful, eager=False, name=name)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/script_ops.py", line 281, in _internal_py_func
input=inp, token=token, Tout=Tout, name=name)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/gen_script_ops.py", line 129, in py_func
"PyFunc", input=input, token=token, Tout=Tout, name=name)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/op_def_library.py", line 787, in _apply_op_helper
op_def=op_def)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/util/deprecation.py", line 488, in new_func
return func(*args, **kwargs)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/ops.py", line 3272, in create_op
op_def=op_def)
File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/framework/ops.py", line 1768, in __init__
self._traceback = tf_stack.extract_stack()

InvalidArgumentError (see above for traceback): TypeError: 'int' object is not iterable
Traceback (most recent call last):

File "/usr/local/lib/python3.4/dist-packages/tensorflow/python/ops/script_ops.py", line 206, in __call__
ret = func(*args)

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 358, in first_value_func
self._metrics = self.evaluate()

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_evaluation.py", line 207, in evaluate
self._detection_boxes_list)

File "/usr/local/lib/python3.4/dist-packages/object_detection-0.1-py3.4.egg/object_detection/metrics/coco_tools.py", line 134, in LoadAnnotations
results.createIndex()

File "/usr/local/lib/python3.4/dist-packages/pycocotools/coco.py", line 106, in createIndex
for cat in self.dataset['categories']:

TypeError: 'int' object is not iterable

 [[{{node PyFunc_3}} = PyFunc[Tin=[], Tout=[DT_FLOAT], token="pyfunc_5", _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]

In my situation I also had the same problem, and I changed category_index.values() to list(category_index.values()) in models\research\object_detection\model_lib.py, but it did not work.
However, I also changed model_lib.py where my Python packages are installed. For me it's C:\Users\ASUS\AppData\Local\Programs\Python\Python36\Lib\site-packages\object_detection-0.1-py3.6.egg\object_detection\model_lib.py and it finally works! Hope it helps.

@JerryZhuzq, thank you, it worked for me also. The reason could be that, since I had already completed the Object Detection API setup (i.e. python setup.py install), the change has to be made in Python36\Lib\site-packages\object_detection-0.1-py3.6.egg\object_detection\model_lib.py rather than in the repository cloned from git.
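
If the edit appears to have no effect, it is worth checking which copy of object_detection Python actually imports, since an installed egg in site-packages shadows the cloned repository. A quick check, assuming the package is importable from your environment:

```python
# Prints the file Python actually loads for model_lib. If it points into
# site-packages (e.g. an object_detection-0.1-pyX.Y.egg), patch that copy,
# or re-run `python setup.py install` after editing the cloned repository.
from object_detection import model_lib
print(model_lib.__file__)
```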

For anyone unable to understand the solution (@MaeThird's comment): we will include this fix in the next release.

Changing to list() as per @MaeThird worked for me; didn't need @JerryZhuzq's step. Using TF 1.10.1.

Does somebody know how I can fix this on Python 3.7.6? I can't find the `model_lib.py` that @MaeThird pointed to for Python 3.6. Thanks!

My error is:
`In [44]: dill.dump_session('python_environment.pkl')

Traceback (most recent call last):

File "", line 1, in
dill.dump_session('python_environment.pkl')

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\libsite-packages\dill_dill.py", line 351, in dump_session
pickler.dump(main)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\libsite-packages\dill_dill.py", line 445, in dump
StockPickler.dump(self, obj)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 437, in dump
self.save(obj)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 504, in save
f(self, obj) # Call unbound method with explicit self

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\libsite-packages\dill_dill.py", line 1295, in save_module
state=_main_dict)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 662, in save_reduce
save(state)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 504, in save
f(self, obj) # Call unbound method with explicit self

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\libsite-packages\dill_dill.py", line 912, in save_module_dict
StockPickler.save_dict(pickler, obj)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 859, in save_dict
self._batch_setitems(obj.items())

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 885, in _batch_setitems
save(v)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 504, in save
f(self, obj) # Call unbound method with explicit self

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\libsite-packages\dill_dill.py", line 912, in save_module_dict
StockPickler.save_dict(pickler, obj)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 859, in save_dict
self._batch_setitems(obj.items())

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 885, in _batch_setitems
save(v)

File "D:\Downloads\Instaladores\WPy64-3760\python-3.7.6.amd64\lib\pickle.py", line 524, in save
rv = reduce(self.proto)

TypeError: can't pickle dict_items objects
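This one is the same family of problem but triggered by dill.dump_session(): some variable in the interpreter session holds a dict view (here a dict_items object), and the pickler cannot serialize views. Below is a minimal sketch of the failure and the usual workaround of materializing the view into a list before dumping the session; the variable names are made up for illustration:

```python
import pickle

settings = {'lr': 0.01, 'epochs': 10}
items_view = settings.items()          # dict_items -- not picklable

try:
    pickle.dumps(items_view)
except TypeError as err:
    print(err)                         # e.g. "can't pickle dict_items objects"

# Workaround: convert any such view in your session to a plain list
# (e.g. items_view = list(settings.items())) and call dill.dump_session() again.
items_list = list(settings.items())
assert pickle.loads(pickle.dumps(items_list)) == items_list
```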

Hello, I am getting the same issue with Python 3 as well.
Please recommend what should be done.

TypeError Traceback (most recent call last)
/databricks/spark/python/pyspark/serializers.py in dumps(self, obj)
704 try:
--> 705 return cloudpickle.dumps(obj, 2)
706 except pickle.PickleError:

/databricks/spark/python/pyspark/cloudpickle.py in dumps(obj, protocol)
862 cp = CloudPickler(file,protocol)
--> 863 cp.dump(obj)
864 return file.getvalue()

/databricks/spark/python/pyspark/cloudpickle.py in dump(self, obj)
259 try:
--> 260 return Pickler.dump(self, obj)
261 except RuntimeError as e:

/usr/lib/python3.7/pickle.py in dump(self, obj)
436 self.framer.start_framing()
--> 437 self.save(obj)
438 self.write(STOP)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/usr/lib/python3.7/pickle.py in save_tuple(self, obj)
770 for element in obj:
--> 771 save(element)
772 # Subtle. Same as in the big comment below.

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/databricks/spark/python/pyspark/cloudpickle.py in save_function(self, obj, name)
405 if klass is None or klass is not obj:
--> 406 self.save_function_tuple(obj)
407 return

/databricks/spark/python/pyspark/cloudpickle.py in save_function_tuple(self, func)
548 state['qualname'] = func.__qualname__
--> 549 save(state)
550 write(pickle.TUPLE)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/usr/lib/python3.7/pickle.py in save_dict(self, obj)
855 self.memoize(obj)
--> 856 self._batch_setitems(obj.items())
857

/usr/lib/python3.7/pickle.py in _batch_setitems(self, items)
881 save(k)
--> 882 save(v)
883 write(SETITEMS)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/usr/lib/python3.7/pickle.py in save_list(self, obj)
815 self.memoize(obj)
--> 816 self._batch_appends(obj)
817

/usr/lib/python3.7/pickle.py in _batch_appends(self, items)
839 for x in tmp:
--> 840 save(x)
841 write(APPENDS)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/databricks/spark/python/pyspark/cloudpickle.py in save_function(self, obj, name)
399 or themodule is None):
--> 400 self.save_function_tuple(obj)
401 return

/databricks/spark/python/pyspark/cloudpickle.py in save_function_tuple(self, func)
548 state['qualname'] = func.__qualname__
--> 549 save(state)
550 write(pickle.TUPLE)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/usr/lib/python3.7/pickle.py in save_dict(self, obj)
855 self.memoize(obj)
--> 856 self._batch_setitems(obj.items())
857

/usr/lib/python3.7/pickle.py in _batch_setitems(self, items)
881 save(k)
--> 882 save(v)
883 write(SETITEMS)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/usr/lib/python3.7/pickle.py in save_dict(self, obj)
855 self.memoize(obj)
--> 856 self._batch_setitems(obj.items())
857

/usr/lib/python3.7/pickle.py in _batch_setitems(self, items)
886 save(k)
--> 887 save(v)
888 write(SETITEM)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/databricks/spark/python/pyspark/cloudpickle.py in save_function(self, obj, name)
399 or themodule is None):
--> 400 self.save_function_tuple(obj)
401 return

/databricks/spark/python/pyspark/cloudpickle.py in save_function_tuple(self, func)
548 state['qualname'] = func.__qualname__
--> 549 save(state)
550 write(pickle.TUPLE)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/usr/lib/python3.7/pickle.py in save_dict(self, obj)
855 self.memoize(obj)
--> 856 self._batch_setitems(obj.items())
857

/usr/lib/python3.7/pickle.py in _batch_setitems(self, items)
881 save(k)
--> 882 save(v)
883 write(SETITEMS)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/usr/lib/python3.7/pickle.py in save_dict(self, obj)
855 self.memoize(obj)
--> 856 self._batch_setitems(obj.items())
857

/usr/lib/python3.7/pickle.py in _batch_setitems(self, items)
881 save(k)
--> 882 save(v)
883 write(SETITEMS)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
503 if f is not None:
--> 504 f(self, obj) # Call unbound method with explicit self
505 return

/usr/lib/python3.7/pickle.py in save_list(self, obj)
815 self.memoize(obj)
--> 816 self._batch_appends(obj)
817

/usr/lib/python3.7/pickle.py in _batch_appends(self, items)
839 for x in tmp:
--> 840 save(x)
841 write(APPENDS)

/usr/lib/python3.7/pickle.py in save(self, obj, save_persistent_id)
523 if reduce is not None:
--> 524 rv = reduce(self.proto)
525 else:

TypeError: can't pickle dict_values objects

During handling of the above exception, another exception occurred:

PicklingError Traceback (most recent call last)
in
----> 1 read_file['ObjectID'].apply(odata)

/local_disk0/pythonVirtualEnvDirs/virtualEnv-e6ccc149-901a-412c-b6f3-19fe27ba5f0d/lib/python3.7/site-packages/databricks/koalas/usage_logging/__init__.py in wrapper(*args, **kwargs)
168 start = time.perf_counter()
169 try:
--> 170 res = func(*args, **kwargs)
171 logger.log_success(
172 class_name, function_name, time.perf_counter() - start, signature

/local_disk0/pythonVirtualEnvDirs/virtualEnv-e6ccc149-901a-412c-b6f3-19fe27ba5f0d/lib/python3.7/site-packages/databricks/koalas/series.py in apply(self, func, args, **kwds)
2629 else:
2630 wrapped = ks.pandas_wraps(return_col=return_sig)(apply_each)
-> 2631 return wrapped(self, *args, **kwds).rename(self.name)
2632
2633 # TODO: not all arguments are implemented comparing to Pandas' for now.

/local_disk0/pythonVirtualEnvDirs/virtualEnv-e6ccc149-901a-412c-b6f3-19fe27ba5f0d/lib/python3.7/site-packages/databricks/koalas/typedef.py in wrapper(*args, **kwargs)
393 )
394 spark_return_type = sig_return.tpe
--> 395 return _make_fun(f, spark_return_type, *args, **kwargs)
396
397 return wrapper

/local_disk0/pythonVirtualEnvDirs/virtualEnv-e6ccc149-901a-412c-b6f3-19fe27ba5f0d/lib/python3.7/site-packages/databricks/koalas/typedef.py in _make_fun(f, return_type, *args, **kwargs)
277 spark_col_args.append(col._scol)
278 kw_name_tokens.append("{}={}".format(key, col.name))
--> 279 col = wrapped_udf(*spark_col_args)
280 series = kser._with_new_scol(scol=col) # type: 'ks.Series'
281 all_name_tokens = name_tokens + sorted(kw_name_tokens)

/databricks/spark/python/pyspark/sql/udf.py in wrapper(*args)
194 @functools.wraps(self.func, assigned=assignments)
195 def wrapper(*args):
--> 196 return self(*args)
197
198 wrapper.__name__ = self._name

/databricks/spark/python/pyspark/sql/udf.py in __call__(self, *cols)
172
173 def __call__(self, *cols):
--> 174 judf = self._judf
175 sc = SparkContext._active_spark_context
176 return Column(judf.apply(_to_seq(sc, cols, _to_java_column)))

/databricks/spark/python/pyspark/sql/udf.py in _judf(self)
156 # and should have a minimal performance impact.
157 if self._judf_placeholder is None:
--> 158 self._judf_placeholder = self._create_judf()
159 return self._judf_placeholder
160

/databricks/spark/python/pyspark/sql/udf.py in _create_judf(self)
165 sc = spark.sparkContext
166
--> 167 wrapped_func = _wrap_function(sc, self.func, self.returnType)
168 jdt = spark._jsparkSession.parseDataType(self.returnType.json())
169 judf = sc._jvm.org.apache.spark.sql.execution.python.UserDefinedPythonFunction(

/databricks/spark/python/pyspark/sql/udf.py in _wrap_function(sc, func, returnType)
33 def _wrap_function(sc, func, returnType):
34 command = (func, returnType)
---> 35 pickled_command, broadcast_vars, env, includes = _prepare_for_python_RDD(sc, command)
36 return sc._jvm.PythonFunction(bytearray(pickled_command), env, includes, sc.pythonExec,
37 sc.pythonVer, broadcast_vars, sc._javaAccumulator)

/databricks/spark/python/pyspark/rdd.py in _prepare_for_python_RDD(sc, command)
2461 # the serialized command will be compressed by broadcast
2462 ser = CloudPickleSerializer()
-> 2463 pickled_command = ser.dumps(command)
2464 if len(pickled_command) > sc._jvm.PythonUtils.getBroadcastThreshold(sc._jsc): # Default 1M
2465 # The broadcast will have same life cycle as created PythonRDD

/databricks/spark/python/pyspark/serializers.py in dumps(self, obj)
713 msg = "Could not serialize object: %s: %s" % (e.__class__.__name__, emsg)
714 cloudpickle.print_exec(sys.stderr)
--> 715 raise pickle.PicklingError(msg)
716
717

PicklingError: Could not serialize object: TypeError: can't pickle dict_values objects
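The Spark/koalas case is the same root cause one level up: cloudpickle has to serialize the function passed to .apply() together with everything it references, and somewhere in that closure (or in the globals it pulls in) there is a dict_values object. A minimal sketch of the usual fix follows, with made-up names standing in for odata and whatever lookup data it uses: convert every dict view to a plain list (or keep the dict itself) before the function is defined, so the serialized closure only contains picklable containers.

```python
# Illustrative only -- `mapping` and the lookup logic are stand-ins,
# not names taken from this issue.
mapping = {101: "tower", 102: "antenna"}

# BAD: a dict view captured by the UDF makes cloudpickle fail with
# "can't pickle dict_values objects" when Spark serializes the function.
# known_names = mapping.values()

# GOOD: materialize the view (or just reference the dict directly).
known_names = list(mapping.values())

def odata(object_id):
    # References only plain dicts/lists, which cloudpickle can serialize.
    return mapping.get(object_id, "unknown")

# read_file['ObjectID'].apply(odata)  # hypothetical call; now serializes cleanly
```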
