Hi,
Are you looking for the following traceback?
AttributeError Traceback (most recent call last)
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\distributed\protocol\pickle.py in dumps(x, buffer_callback, protocol)
48 buffers.clear()
---> 49 result = pickle.dumps(x, **dump_kwargs)
50 if len(result) < 1000:
AttributeError: Can't pickle local object 'create_engine.<locals>.connect'
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-...> in <module>
1 b = time.time()
----> 2 json = await get_total_gds(engine,'HFX4T','EMFP',91,last_saturday,current_date,False,True,'')
3 e = time.time()
4 print('Timeeee : ', e-b)
5 print(json)
<ipython-input-...> in get_total_gds(engine, part, site, days, last_saturday, current_date, week, compressed, cfg)
84 display(img)
85
---> 86 json = dask.compute(df_final_gds)
87 ##print(json)
88
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\dask\base.py in compute(traverse, optimize_graph, scheduler, get, *args, **kwargs)
569 postcomputes.append(x.dask_postcompute())
570
---> 571 results = schedule(dsk, keys, **kwargs)
572 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
573
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\distributed\client.py in get(self, dsk, keys, workers, allow_other_workers, resources, sync, asynchronous, direct, retries, priority, fifo_timeout, actors, **kwargs)
2669 Client.compute : Compute asynchronous collections
2670 """
---> 2671 futures = self._graph_to_futures(
2672 dsk,
2673 keys=set(flatten([keys])),
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\distributed\client.py in _graph_to_futures(self, dsk, keys, workers, allow_other_workers, priority, user_priority, resources, retries, fifo_timeout, actors)
2594 # Pack the high level graph before sending it to the scheduler
2595 keyset = set(keys)
---> 2596 dsk = dsk.__dask_distributed_pack__(self, keyset, annotations)
2597
2598 # Create futures before sending graph (helps avoid contention)
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\dask\highlevelgraph.py in __dask_distributed_pack__(self, client, client_keys, annotations)
1074 "__module__": layer.__module__,
1075 "__name__": type(layer).__name__,
---> 1076 "state": layer.__dask_distributed_pack__(
1077 self.get_all_external_keys(),
1078 self.key_dependencies,
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\dask\highlevelgraph.py in __dask_distributed_pack__(self, all_hlg_keys, known_key_dependencies, client, client_keys)
432 for k, v in dsk.items()
433 }
---> 434 dsk = toolz.valmap(dumps_task, dsk)
435 return {"dsk": dsk, "dependencies": dependencies}
436
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\toolz\dicttoolz.py in valmap(func, d, factory)
81 """
82 rv = factory()
---> 83 rv.update(zip(d.keys(), map(func, d.values())))
84 return rv
85
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\distributed\worker.py in dumps_task(task)
4353 return d
4354 elif not any(map(_maybe_complex, task[1:])):
---> 4355 return {"function": dumps_function(task[0]), "args": warn_dumps(task[1:])}
4356 return to_serialize(task)
4357
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\distributed\worker.py in warn_dumps(obj, dumps, limit)
4362 def warn_dumps(obj, dumps=pickle.dumps, limit=1e6):
4363 """Dump an object to bytes, warn if those bytes are large"""
---> 4364 b = dumps(obj, protocol=4)
4365 if not _warn_dumps_warned[0] and len(b) > limit:
4366 _warn_dumps_warned[0] = True
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\distributed\protocol\pickle.py in dumps(x, buffer_callback, protocol)
58 try:
59 buffers.clear()
---> 60 result = cloudpickle.dumps(x, **dump_kwargs)
61 except Exception as e:
62 logger.info("Failed to serialize %s. Exception: %s", x, e)
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\cloudpickle\cloudpickle_fast.py in dumps(obj, protocol, buffer_callback)
71 file, protocol=protocol, buffer_callback=buffer_callback
72 )
---> 73 cp.dump(obj)
74 return file.getvalue()
75
c:\users\rohit_gosain\appdata\local\programs\python\python39\lib\site-packages\cloudpickle\cloudpickle_fast.py in dump(self, obj)
600 def dump(self, obj):
601 try:
---> 602 return Pickler.dump(self, obj)
603 except RuntimeError as e:
604 if "recursion" in e.args[0]:
TypeError: cannot pickle 'sqlalchemy.cprocessors.UnicodeResultProcessor' object
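Both errors point at the same root cause: the SQLAlchemy engine (and the result-processor objects it holds, such as `sqlalchemy.cprocessors.UnicodeResultProcessor`) cannot be pickled, so Dask cannot ship it to the workers inside the task graph. The usual workaround is to pass only the connection string into the task and create the engine there. Below is a minimal sketch of that pattern, not your actual `get_total_gds` logic; the connection URL and query are hypothetical placeholders:

```python
import dask
from sqlalchemy import create_engine, text

# Hypothetical connection URL -- replace with your own.
CONNECTION_URL = "postgresql://user:password@host:5432/mydb"

@dask.delayed
def run_query(connection_url, query):
    # Build the engine inside the task: only the picklable
    # connection string travels to the worker, never the engine.
    engine = create_engine(connection_url)
    try:
        with engine.connect() as conn:
            return conn.execute(text(query)).fetchall()
    finally:
        engine.dispose()

(rows,) = dask.compute(run_query(CONNECTION_URL, "SELECT 1"))
```

If you are building dataframes, `dask.dataframe.read_sql_table` follows the same rule: it takes a database URI string rather than a live engine, for exactly this reason.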