My problem is a continuation of this question: How to create federated dataset from a CSV file? I managed to load a federated dataset from a given CSV file, including both the train and the test data. My question now is how to reproduce a working example that builds an iterative process which performs […]
Here is how I walk the working directory, read the CSV files, and build the federated `ClientData` for train and test:

```python
import collections
import os

import numpy as np
import pandas as pd
import tensorflow as tf
import tensorflow_federated as tff
from absl import app

for root, dirs, files in os.walk(working_dir):
    file_list = []
    for filename in files:
        if filename.endswith('.csv'):
            file_list.append(os.path.join(root, filename))

df_list = []
for file in file_list:
    df = pd.read_csv(file,
                     delimiter=' ',
                     header=None,
                     names=["meas_info", …],
                     usecols=[1, …],
                     na_filter=True,
                     na_values=["NIL"],
                     index_col='time')
    df_list.append(df[["value"]])

if df_list:
    rawdata = pd.concat(df_list)
    client_ids = …


def create_tf_dataset_for_client_fn(client_id):
    features = ['meas_info', 'counter']
    LABEL_COLUMN = 'value'
    dataset = tf.data.Dataset.from_tensor_slices(
        (collections.OrderedDict(client_data[features]),
         client_data[LABEL_COLUMN].to_list()))
    global input_spec
    input_spec = dataset.element_spec
    dataset = dataset.shuffle(SHUFFLE_…)
    …
    return dataset


train_data = tff.simulation.ClientData.from_clients_and_fn(
    client_ids=client_ids,
    create_tf_dataset_for_client_fn=create_tf_dataset_for_client_fn)
test_data = tff.simulation.ClientData.from_clients_and_fn(
    client_ids=client_ids,
    create_tf_dataset_for_client_fn=create_tf_dataset_for_client_fn)
example_dataset = train_data.create_tf_dataset_for_client(…)
```

The Keras model and the TFF model wrapper:

```python
def retrieve_model():
    model = tf.keras.models.Sequential([
        tf.keras.layers.LSTM(2, input_shape=(…, input_shape[1]),
                             return_sequences=True),
        tf.keras.layers.Dense(256, activation=tf.nn.relu),
        tf.keras.layers.Activation(tf.nn.softmax),
        tf.keras.layers.Dense(1)
    ])
    return model


def tff_model_fn() -> tff.learning.Model:
    return tff.learning.from_keras_model(
        keras_model=retrieve_model(),
        input_spec=example_dataset.element_spec,
        loss=loss_builder(),
        metrics=metrics_builder())
```

The iterative process and the training loop:

```python
iterative_process = tff.learning.build_federated_averaging_process(
    tff_model_fn,
    Parameters.server_adam_optimizer_fn,
    Parameters.client_adam_optimizer_fn)

server_state = iterative_process.initialize()
for round_num in range(Parameters.FLAGS.total_rounds):
    sampled_clients = np.random.choice(
        …,
        size=Parameters.FLAGS.train_clients_per_round,
        replace=False)
    sampled_train_data = [
        train_data.create_tf_dataset_for_client(client)
        for client in sampled_clients
    ]
    server_state, metrics = iterative_process.next(server_state,
                                                   sampled_train_data)
    train_metrics = metrics['train']
    print(metrics)


if __name__ == '__main__':
    app.run(main)
```

Running this raised:

```
… as it must specify type information for both inputs to and predictions from
the model. You passed input spec {'meas_info': TensorSpec(shape=(None, …),
dtype=tf.float32, name=None), 'counter': TensorSpec(shape=(None, …), …),
'value': TensorSpec(shape=(None, …), …)}.
```

Thanks to @Zachary Garrett I solved that error with his help, by adding this line of code inside `create_tf_dataset_for_client_fn`:

```python
client_data = df[df['value'] == c…
```

But now I get:

```
… but it received 2 input tensors. Inputs received: […]
```

What do I miss again? Maybe something in the Sequential layers of `retrieve_model()` above?
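The "received 2 input tensors" message typically appears when a single-input Sequential model is fed dataset elements whose feature part is an OrderedDict of several columns. One common remedy is to pack the feature columns into a single tensor so each dataset element is a plain `(x, y)` pair that matches the model's single input. Below is a minimal sketch of that idea only, not the original poster's code: `rawdata`, the filter column, and the shuffle/batch sizes are assumptions, and the column names simply mirror the ones in the question.

```python
import tensorflow as tf

FEATURE_COLUMNS = ['meas_info', 'counter']   # feature columns as named in the question
LABEL_COLUMN = 'value'                       # label column as named in the question


def create_tf_dataset_for_client_fn(client_id):
    # Assumed: rawdata is the concatenated pandas DataFrame; filter the rows
    # belonging to this client (hypothetical choice of identifying column).
    client_data = rawdata[rawdata['meas_info'] == client_id]

    # Pack all feature columns into ONE float tensor of shape [rows, n_features]
    # instead of an OrderedDict, so the Sequential model sees a single input.
    x = tf.stack(
        [tf.cast(client_data[col].values, tf.float32) for col in FEATURE_COLUMNS],
        axis=-1)
    y = tf.cast(client_data[LABEL_COLUMN].values, tf.float32)

    dataset = tf.data.Dataset.from_tensor_slices((x, y))
    # Batching makes element_spec a two-element (features, label) structure of
    # batched tensors, which is the shape of input_spec that
    # tff.learning.from_keras_model expects.
    return dataset.shuffle(100).batch(20)
```

With a single feature tensor, `example_dataset.element_spec` is a two-element (features, label) structure, which also addresses the earlier "must specify type information for both inputs and predictions" complaint. Note that an LSTM layer still expects a time dimension, so the series would additionally need to be windowed into `[timesteps, features]` samples (omitted here); alternatively, the OrderedDict could be kept and the model rebuilt with named inputs via the functional API.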