# Source code for opacus.distributed

#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn

"""
For all parameters of a given model averages gradients over all workers

Args:
model: model

Returns:
None
"""
world_size = torch.distributed.get_world_size()
for param in model.parameters():
continue

[docs]class DifferentiallyPrivateDistributedDataParallel(nn.Module):
"""
Implements distributed data parallelism that is based on
torch.distributed package at the module level.

"""

def __init__(self, model: nn.Module):
super().__init__()

# Synchronize the model
params = list(model.parameters())