#r #tensorflow #machine-learning #keras
Question:
I have a problem with my trainingtarget, testtarget, trainLables and testLables input variables in my model, despite several posts on this topic (e.g. https://github.com/rstudio/tensorflow/issues/375 ). I always end up with Error in py_call_impl(callable, dots$args, dots$keywords) :. In my example:
#Packages
library(keras)
library(dplyr)
#Data set
RES_F<-read.csv("https://raw.githubusercontent.com/Leprechault/trash/main/cnn_ds.csv",sep=",",h=T)
str(RES_F)
#'data.frame': 11884 obs. of 5 variables:
# $ status: chr "healthy" "attack" "healthy" "attack" ...
# $ NDVI : num 0.459 0.311 0.565 0.529 0.434 ...
# $ SIPI : num 0.448 0.65 0.346 0.418 0.488 ...
# $ RGI : num 0.592 0.718 0.604 0.619 0.685 ...
# $ PRI : num 0.6 0.631 0.629 0.586 0.641 ...
# Training using 80%
RES_train<-RES_F%>% group_by(status) %>% sample_n(1280)
RES_train<-as.data.frame(RES_train)
#Using 10% for test
rest.RES <- anti_join(RES_F, RES_train)
RES_test<-rest.RES%>% group_by(status) %>% sample_n(160)
RES_test<-as.data.frame(RES_test)
#Using 10% for validation
rest.RES2 <- anti_join(rest.RES, RES_test)
RES_val<-rest.RES2%>% group_by(status) %>% sample_n(160)
RES_val<-as.data.frame(RES_val)
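Just as a sanity check on the resulting split sizes (for reference, this is what the sampling above produces with the two status classes):
nrow(RES_train); nrow(RES_test); nrow(RES_val)
# 2560 320 320 (1280 / 160 / 160 rows sampled per class)
table(RES_train$status)
# attack healthy
#   1280    1280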
training <- RES_train[,2:5] # includes all independent variables
test <- RES_test[,2:5] # includes all independent variables
# also don't forget to identify the target variable (we can use one for training and test)
trainingtarget <- as.numeric(as.factor(RES_train[,1]))-1 # includes the dependent variable status for the training data
testtarget <- as.numeric(as.factor(RES_test[,1]))-1
#################################### Undertake analysis ######################################
# create the categorical variables
trainLables <- keras::to_categorical(trainingtarget)
testLables <- keras::to_categorical(testtarget)
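For reference, here is what to_categorical() gives me for the two-level status factor (my understanding is that it one-hot encodes the 0/1 target into one column per class):
dim(trainLables)  # 2560 2 -> one column per class ("attack" = 0, "healthy" = 1 after the factor conversion)
head(trainLables) # one-hot rows such as 1 0 (class 0) or 0 1 (class 1)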
# create the first model design
model <- keras_model_sequential()
# the keras_model_sequential consists of a linear stack of layers (in some sequential linear order)
# now we use the pipe operator (%>%) to pass info from left to right, i.e., add additional layers to 'model'
model %>%
layer_dense(units=8, activation = 'relu', input_shape = 21) %>% # this is for independent variables
layer_dense(units=3, activation = 'softmax')
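(I took the units and input_shape above from an example I was following; for my own data I assume the shapes should instead match my 4 predictor columns and the 2 one-hot label columns, something like the sketch below with a separate model_alt object, but I am not sure this is right.)
# sketch only: input_shape = ncol(training) = 4 predictors,
# and 2 output units to match the 2 columns of trainLables
model_alt <- keras_model_sequential() %>%
  layer_dense(units = 8, activation = 'relu', input_shape = ncol(training)) %>%
  layer_dense(units = 2, activation = 'softmax')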
########################## Configure the model for the learning process ############################
model %>% keras::compile(loss='binary_crossentrophy',
optimizer='adam',
metrics='accuracy')
# binary cross-entropy is used when the outcome is categorical with 2 classes (status here)
# adam is a commonly used optimiser
# accuracy measures how closely the model's predictions match the observed results; this is the reported metric
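(Side note: 'binary_crossentrophy' above may be a typo on my part for 'binary_crossentropy', and since the labels were one-hot encoded with to_categorical() I wonder whether 'categorical_crossentropy' is the appropriate loss. For the sketch model above I would compile it like this, but again I am not certain:)
model_alt %>% keras::compile(loss = 'categorical_crossentropy',
                             optimizer = 'adam',
                             metrics = 'accuracy')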
history <- model%>%
fit(training, # this is the input, the independent variable columns
trainLables,
epoch=200,
batch=32,
validation_split = 0.2)
# here we use the model we created to fit the training data (training)
# to the dummy-coded dependent variable, trainLables
# and train for 200 epochs.
# we use a batch size of 32, i.e. the number of samples per gradient update
# and use 20% of the training data for the validation split
Error in py_call_impl(callable, dots$args, dots$keywords) :
ValueError: in user code:
C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\keras\engine\training.py:571 train_function *
outputs = self.distribute_strategy.run(
C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:951 run **
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2290 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2649 _call_for_each_replica
return fn(*args, **kwargs)
C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\keras\engine\training.py:531 train_step **
y_pred = self(x, trainin
6.
stop(structure(list(message = "ValueError: in user code:\n\n C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\keras\engine\training.py:571 train_function *\n outputs = self.distribute_strategy.run(\n C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:951 run **\n return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)\n C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2290 call_for_each_replica\n return self._call_for_each_replica(fn, args, kwargs)\n C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2649 _call_for_each_replica\n return fn(*args, **kwargs)\n C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\keras\engine\training.py:531 train_step **\n y_pred = self(x, training=True)\n C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\keras\engine\base_layer.py:886 __call__\n self.name)\n C:\Users\fores\AppData\Local\R-MINI~1\envs\R-RETI~1\lib\site-packages\tensorflow\python\keras\engine\input_spec.py:158 assert_input_compatibility\n ' input tensors. Inputs received: ' + str(inputs))\n\n ValueError: Layer sequential expects 1 inputs, but it received 4 input tensors. Inputs received: [<tf.Tensor 'ExpandDims:0' shape=(32, 1) dtype=float32>, <tf.Tensor 'ExpandDims_1:0' shape=(32, 1) dtype=float32>, <tf.Tensor 'ExpandDims_2:0' shape=(32, 1) dtype=float32>, <tf.Tensor 'ExpandDims_3:0' shape=(32, 1) dtype=float32>]\n",
call = py_call_impl(callable, dots$args, dots$keywords),
cppstack = NULL), class = c("Rcpp::exception", "C Error",
"error", "condition")))
5.
(structure(function (...)
{
dots <- py_resolve_dots(list(...))
result <- py_call_impl(callable, dots$args, dots$keywords) ...
4.
do.call(object$fit, args)
3.
fit.keras.engine.training.Model(., training, trainLables, epoch = 200,
batch = 32, validation_split = 0.2)
2.
fit(., training, trainLables, epoch = 200, batch = 32, validation_split = 0.2)
1.
model %>% fit(training, trainLables, epoch = 200, batch = 32,
validation_split = 0.2)
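From the ValueError it looks like the sequential model received 4 separate input tensors, one per column of the data.frame, so I suspect fit() may want a numeric matrix rather than the data.frame training. A minimal sketch of what I mean (my assumption only, not a confirmed fix):
# convert the predictors from data.frame to matrix so keras sees a single 2-D input
x_train <- as.matrix(training)  # 2560 x 4 numeric matrix
x_test  <- as.matrix(test)      # 320 x 4 numeric matrix
# with the sketch model compiled above, the call would then be:
history_alt <- model_alt %>%
  fit(x_train, trainLables,
      epochs = 200,
      batch_size = 32,
      validation_split = 0.2)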
Please, any ideas?