I ran compare_models and returned the top 4 models, then saved those 4 models to disk. Now, when I load the saved models and call tune_model on them, I get
ValueError: When passing a model not in PyCaret's model library, the custom_grid parameter must be provided.
The model currently being tuned in the loop is the xgboost model, which is in PyCaret's model library, so I don't understand where this ValueError is coming from. Here is the relevant part of the code:
# Comparing models and returning the top 4
models = exp.compare_models(exclude=models_to_ignore, n_select=4)

# Saving the models
from pathlib import Path
import os
from pycaret.regression import save_model

model_dir = Path('../models')
for index, model in enumerate(models):
    model_file_name = models_metrics.index[index]
    print(f'Saving {model_file_name} model')
    save_model(model, os.path.join(model_dir, model_file_name))
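(models_metrics above is the compare_models leaderboard, obtained along these lines — a sketch only:)

# Sketch: the leaderboard is pulled right after compare_models; its index holds
# the model IDs ('xgboost', 'lightgbm', ...) used as file names in the loop above.
models_metrics = exp.pull()
print(models_metrics.index[:4])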
# Loading and tuning the saved models
from pathlib import Path
from glob import glob
import os
from pycaret.regression import load_model, tune_model

model_dir = Path('../models')
for model_path in glob(os.path.join(model_dir, '*.pkl')):
    tuned_model_file = os.path.basename(model_path)
    tuned_model_file = os.path.splitext(tuned_model_file)[0]
    # load_model expects the path without the .pkl extension
    model_path = os.path.splitext(model_path)[0]

    print(f'Started tuning {model_path} model')
    model = load_model(model_path)
    print(model)

    # return_tuner=True makes tune_model return (tuned_model, tuner)
    tuned_model, tuner = tune_model(
        model,
        n_iter=20,
        optimize='RMSLE',
        search_library='tune-sklearn',
        search_algorithm='optuna',
        early_stopping='asha',
        return_tuner=True,
        return_train_score=True,
    )

    print(f'Saving Model to {tuned_model_file}')
    save_model(tuned_model, os.path.join(model_dir, tuned_model_file))
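As the output below shows, load_model returns the full preprocessing Pipeline rather than the bare estimator. Would extracting the fitted estimator from the pipeline's last step and passing that to tune_model be the intended approach? A rough sketch (the 'xgboost' path and the steps[-1][1] access are just illustrative):

# Sketch only: take the fitted estimator (last pipeline step) and tune that instead
loaded = load_model(os.path.join(model_dir, 'xgboost'))
estimator = loaded.steps[-1][1]   # e.g. the fitted XGBRegressor
tuned_model, tuner = tune_model(estimator, n_iter=20, optimize='RMSLE', return_tuner=True)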
# Error output
Started tuning ../models/xgboost model
Transformation Pipeline and Model Successfully Loaded
Pipeline(memory=FastMemory(location=/var/folders/vc/262dx0q508d6dv26phhy52ph0000gn/T/joblib),
steps=[('target_transformation',
TransformerWrapperWithInverse(transformer=TargetTransformer(estimator=PowerTransformer(standardize=False)))),
('numerical_imputer',
TransformerWrapper(include=['onpromotion', 'oil_price'],
transformer=SimpleImputer())),
('categori...
feature_types=None, gamma=None, gpu_id=None,
grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None,
max_bin=None, max_cat_threshold=None,
max_cat_to_onehot=None, max_delta_step=None,
max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan,
monotone_constraints=None, n_estimators=100,
n_jobs=2, num_parallel_tree=None, predictor=None,
random_state=4758, ...))])
Initiated . . . . . . . . . . . . . . . . . . 16:12:40
Status . . . . . . . . . . . . . . . . . . Loading Dependencies
Estimator . . . . . . . . . . . . . . . . . . Compiling Library
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/var/folders/vc/262dx0q508d6dv26phhy52ph0000gn/T/ipykernel_12938/1177041359.py in <cell line: 5>()
13 print(model)
14
---> 15 tuned_model = tune_model(
16 model
17 # n_iter=20,
/opt/anaconda3/envs/store_sales/lib/python3.10/site-packages/pycaret/utils/generic.py in wrapper(*args, **kwargs)
963 if globals_d[name] is None:
964 raise ValueError(message)
--> 965 return func(*args, **kwargs)
966
967 return wrapper
/opt/anaconda3/envs/store_sales/lib/python3.10/site-packages/pycaret/regression/functional.py in tune_model(estimator, fold, round, n_iter, custom_grid, optimize, custom_scorer, search_library, search_algorithm, early_stopping, early_stopping_max_iters, choose_better, fit_kwargs, groups, return_tuner, verbose, tuner_verbose, return_train_score, **kwargs)
1203 """
1204
-> 1205 return _CURRENT_EXPERIMENT.tune_model(
1206 estimator=estimator,
1207 fold=fold,
/opt/anaconda3/envs/store_sales/lib/python3.10/site-packages/pycaret/regression/oop.py in tune_model(self, estimator, fold, round, n_iter, custom_grid, optimize, custom_scorer, search_library, search_algorithm, early_stopping, early_stopping_max_iters, choose_better, fit_kwargs, groups, return_tuner, verbose, tuner_verbose, return_train_score, **kwargs)
1503 """
1504
-> 1505 return super().tune_model(
1506 estimator=estimator,
1507 fold=fold,
/opt/anaconda3/envs/store_sales/lib/python3.10/site-packages/pycaret/internal/pycaret_experiment/supervised_experiment.py in tune_model(self, estimator, fold, round, n_iter, custom_grid, optimize, custom_scorer, search_library, search_algorithm, early_stopping, early_stopping_max_iters, choose_better, fit_kwargs, groups, return_tuner, verbose, tuner_verbose, return_train_score, **kwargs)
2238 if estimator_id is None:
2239 if custom_grid is None:
-> 2240 raise ValueError(
2241 "When passing a model not in PyCaret's model library, the custom_grid parameter must be provided."
2242 )
ValueError: When passing a model not in PyCaret's model library, the custom_grid parameter must be provided.