Francesco Capuano committed
Commit ed39f0d · 1 Parent(s): 4a197ef

fix: moving to cpu prior to serialization

lerobot/scripts/server/policy_server.py CHANGED
@@ -236,12 +236,15 @@ class PolicyServer(async_inference_pb2_grpc.AsyncInferenceServicer):
         action_tensor = self._get_action_chunk(observation)
         action_tensor = action_tensor.squeeze(0)
 
+        # Move to CPU before serializing
+        action_tensor = action_tensor.cpu()
+
         post_inference_time = time.time()
         logger.debug(f"Post-inference processing start: {post_inference_time - prep_time:.6f}s")
 
         if action_tensor.dim() == 1:
             # No chunk dimension, so repeat action to create a (dummy) chunk of actions
-            action_tensor = action_tensor.cpu().repeat(self.actions_per_chunk, 1)
+            action_tensor = action_tensor.repeat(self.actions_per_chunk, 1)
 
         action_chunk = self._time_action_chunk(
             observation_t.get_timestamp(), list(action_tensor), observation_t.get_timestep()
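
The hunk hoists the .cpu() call out of the dummy-chunk branch so the action tensor is always on CPU before it is serialized into the gRPC response, not only when action_tensor.dim() == 1. Below is a minimal sketch of the pattern the fix relies on, assuming the chunk is eventually packed into raw bytes; the tensor_to_bytes helper and the 20x6 shape are illustrative only, not the server's actual serialization code.

# A minimal sketch, not the server's implementation: converting a tensor to numpy
# (e.g. to pack it into a protobuf message) fails for CUDA tensors, so the chunk
# must be moved to CPU first.
import torch


def tensor_to_bytes(action_tensor: torch.Tensor) -> bytes:
    # .numpy() raises "can't convert cuda:0 device type tensor to numpy" on GPU
    # tensors, hence the explicit .cpu() (a no-op when already on CPU).
    return action_tensor.cpu().numpy().tobytes()


if __name__ == "__main__":
    device = "cuda" if torch.cuda.is_available() else "cpu"
    # Dummy (actions_per_chunk, action_dim) chunk standing in for the policy output.
    chunk = torch.randn(20, 6, device=device)
    payload = tensor_to_bytes(chunk)
    print(f"serialized {tuple(chunk.shape)} tensor from {device} into {len(payload)} bytes")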