Got an error running the demo code

Here's the demo code; I didn't change anything.
Then I got the error below.

import dask.array as da
import lightgbm as lgb
import numpy as np
from distributed import Client, LocalCluster

cluster = LocalCluster(n_workers=2)
client = Client(cluster)

X = da.random.random((1000, 10), chunks=(500, 10))
y = da.random.random((1000,), chunks=(500,))

def custom_l2_obj(y_true, y_pred):
    # custom L2 objective: gradient and hessian of the squared error
    grad = y_pred - y_true
    hess = np.ones(len(y_true))
    return grad, hess

dask_model = lgb.DaskLGBMRegressor(
    objective=custom_l2_obj
)
dask_model.fit(X, y)
---------------------------------------------------------------------------
LightGBMError                             Traceback (most recent call last)
Cell In[2], line 20
     15     return grad, hess
     17 dask_model = lgb.DaskLGBMRegressor(
     18     objective=custom_l2_obj
     19 )
---> 20 dask_model.fit(X, y)

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\dask.py:1355, in DaskLGBMRegressor.fit(self, X, y, sample_weight, init_score, eval_set, eval_names, eval_sample_weight, eval_init_score, eval_metric, early_stopping_rounds, **kwargs)
   1352 if early_stopping_rounds is not None:
   1353     raise RuntimeError('early_stopping_rounds is not currently supported in lightgbm.dask')
-> 1355 return self._lgb_dask_fit(
   1356     model_factory=LGBMRegressor,
   1357     X=X,
   1358     y=y,
   1359     sample_weight=sample_weight,
   1360     init_score=init_score,
   1361     eval_set=eval_set,
   1362     eval_names=eval_names,
   1363     eval_sample_weight=eval_sample_weight,
   1364     eval_init_score=eval_init_score,
   1365     eval_metric=eval_metric,
   1366     **kwargs
   1367 )

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\dask.py:1047, in _DaskLGBMModel._lgb_dask_fit(self, model_factory, X, y, sample_weight, init_score, group, eval_set, eval_names, eval_sample_weight, eval_class_weight, eval_init_score, eval_group, eval_metric, eval_at, early_stopping_rounds, **kwargs)
   1044 params = self.get_params(True)
   1045 params.pop("client", None)
-> 1047 model = _train(
   1048     client=_get_dask_client(self.client),
   1049     data=X,
   1050     label=y,
   1051     params=params,
   1052     model_factory=model_factory,
   1053     sample_weight=sample_weight,
   1054     init_score=init_score,
   1055     group=group,
   1056     eval_set=eval_set,
   1057     eval_names=eval_names,
   1058     eval_sample_weight=eval_sample_weight,
   1059     eval_class_weight=eval_class_weight,
   1060     eval_init_score=eval_init_score,
   1061     eval_group=eval_group,
   1062     eval_metric=eval_metric,
   1063     eval_at=eval_at,
   1064     **kwargs
   1065 )
   1067 self.set_params(**model.get_params())
   1068 self._lgb_dask_copy_extra_params(model, self)

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\dask.py:785, in _train(client, data, label, params, model_factory, sample_weight, init_score, group, eval_set, eval_names, eval_sample_weight, eval_class_weight, eval_init_score, eval_group, eval_metric, eval_at, **kwargs)
    758 # Tell each worker to train on the parts that it has locally
    759 #
    760 # This code treats ``_train_part()`` calls as not "pure" because:
   (...)
    764 #        relies on global state (it and all the other LightGBM training processes
    765 #        coordinate with each other)
    766 futures_classifiers = [
    767     client.submit(
    768         _train_part,
   (...)
    782     for worker, list_of_parts in worker_map.items()
    783 ]
--> 785 results = client.gather(futures_classifiers)
    786 results = [v for v in results if v]
    787 model = results[0]

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\dask.py:319, in _train_part()
    305         model.fit(
    306             data,
    307             label,
   (...)
    316             **kwargs
    317         )
    318     else:
--> 319         model.fit(
    320             data,
    321             label,
    322             sample_weight=weight,
    323             init_score=init_score,
    324             eval_set=local_eval_set,
    325             eval_sample_weight=local_eval_sample_weight,
    326             eval_init_score=local_eval_init_score,
    327             eval_names=local_eval_names,
    328             **kwargs
    329         )
    331 finally:
    332     if getattr(model, "fitted_", False):

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\sklearn.py:895, in fit()
    888 def fit(self, X, y,
    889         sample_weight=None, init_score=None,
    890         eval_set=None, eval_names=None, eval_sample_weight=None,
    891         eval_init_score=None, eval_metric=None, early_stopping_rounds=None,
    892         verbose='warn', feature_name='auto', categorical_feature='auto',
    893         callbacks=None, init_model=None):
    894     """Docstring is inherited from the LGBMModel."""
--> 895     super().fit(X, y, sample_weight=sample_weight, init_score=init_score,
    896                 eval_set=eval_set, eval_names=eval_names, eval_sample_weight=eval_sample_weight,
    897                 eval_init_score=eval_init_score, eval_metric=eval_metric,
    898                 early_stopping_rounds=early_stopping_rounds, verbose=verbose, feature_name=feature_name,
    899                 categorical_feature=categorical_feature, callbacks=callbacks, init_model=init_model)
    900     return self

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\sklearn.py:748, in fit()
    745 evals_result = {}
    746 callbacks.append(record_evaluation(evals_result))
--> 748 self._Booster = train(
    749     params=params,
    750     train_set=train_set,
    751     num_boost_round=self.n_estimators,
    752     valid_sets=valid_sets,
    753     valid_names=eval_names,
    754     fobj=self._fobj,
    755     feval=eval_metrics_callable,
    756     init_model=init_model,
    757     feature_name=feature_name,
    758     callbacks=callbacks
    759 )
    761 if evals_result:
    762     self._evals_result = evals_result

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\engine.py:271, in train()
    269 # construct booster
    270 try:
--> 271     booster = Booster(params=params, train_set=train_set)
    272     if is_valid_contain_train:
    273         booster.set_train_data_name(train_data_name)

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\basic.py:2598, in __init__()
   2588     params = _choose_param_value(
   2589         main_param_name="num_machines",
   2590         params=params,
   2591         default_value=num_machines_from_machine_list
   2592     )
   2593     params = _choose_param_value(
   2594         main_param_name="local_listen_port",
   2595         params=params,
   2596         default_value=12400
   2597     )
-> 2598     self.set_network(
   2599         machines=machines,
   2600         local_listen_port=params["local_listen_port"],
   2601         listen_time_out=params.get("time_out", 120),
   2602         num_machines=params["num_machines"]
   2603     )
   2604 # construct booster object
   2605 train_set.construct()

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\basic.py:2742, in set_network()
   2740 if isinstance(machines, (list, set)):
   2741     machines = ','.join(machines)
-> 2742 _safe_call(_LIB.LGBM_NetworkInit(c_str(machines),
   2743                                  ctypes.c_int(local_listen_port),
   2744                                  ctypes.c_int(listen_time_out),
   2745                                  ctypes.c_int(num_machines)))
   2746 self.network = True
   2747 return self

File c:\Users\qingan\AppData\Local\Programs\Python\Python39\lib\site-packages\lightgbm\basic.py:125, in _safe_call()
    117 """Check the return value from C API call.
    118 
    119 Parameters
   (...)
    122     The return value from C API calls.
    123 """
    124 if ret != 0:
--> 125     raise LightGBMError(_LIB.LGBM_GetLastError().decode('utf-8'))

LightGBMError: Machine list file doesn't contain the local machine

Can anyone help me with this, please?

Hi @Bojack, welcome to the Dask community!

I just tried your example (where did you take it from?), and it worked fine on my laptop.

This looks more like a LightGBM problem than a Dask one, but I strongly suspect a Python environment issue. How did you install the libraries?
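
To help narrow it down, could you post the versions of dask, distributed and lightgbm you have installed, and whether you installed them with pip or conda? A quick way to check (just a sketch, run it in the same environment that produced the error):

import dask
import distributed
import lightgbm

# Print the installed versions so we can compare environments
print("dask:", dask.__version__)
print("distributed:", distributed.__version__)
print("lightgbm:", lightgbm.__version__)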