paddle-pytorch API对应表
PyTorch API名称 | 对应Paddle API |
torch.set_default_dtype | paddle.set_default_dtype |
torch.get_default_dtype | paddle.get_default_dtype |
torch.numel | paddle.numel |
torch.set_printoptions | paddle.set_printoptions |
torch.tensor | paddle.to_tensor |
torch.from_numpy | paddle.to_tensor |
torch.zeros | paddle.zeros |
torch.zeros_like | paddle.zeros_like |
torch.ones | paddle.ones |
torch.ones_like | paddle.ones_like |
torch.arange | paddle.arange |
torch.range | paddle.arange |
torch.linspace | paddle.linspace |
torch.eye | paddle.eye |
torch.empty | paddle.empty |
torch.empty_like | paddle.empty_like |
torch.full | paddle.full |
torch.full_like | paddle.full_like |
torch.cat | paddle.concat |
torch.chunk | paddle.chunk |
torch.gather | paddle.gather |
torch.index_select | paddle.index_select |
torch.masked_select | paddle.masked_select |
torch.narrow | paddle.slice |
torch.nonzero | paddle.nonzero |
torch.reshape | paddle.reshape |
torch.split | paddle.split |
torch.squeeze | paddle.squeeze |
torch.stack | paddle.stack |
torch.t | paddle.t |
torch.transpose | paddle.transpose |
torch.unbind | paddle.unbind |
torch.unsqueeze | paddle.unsqueeze |
torch.where | paddle.where |
torch.seed | paddle.seed |
torch.manual_seed | paddle.seed |
torch.get_rng_state | paddle.get_cuda_rng_state |
torch.set_rng_state | paddle.set_cuda_rng_state |
torch.bernoulli | paddle.bernoulli |
torch.multinomial | paddle.multinomial |
torch.normal | paddle.normal |
torch.rand | paddle.rand |
torch.randint | paddle.randint |
torch.randn | paddle.randn |
torch.randperm | paddle.randperm |
torch.save | paddle.save |
torch.load | paddle.load |
torch.no_grad | paddle.no_grad |
torch.abs | paddle.abs |
torch.absolute | paddle.abs |
torch.acos | paddle.acos |
torch.arccos | paddle.acos |
torch.add | paddle.add |
torch.asin | paddle.asin |
torch.arcsin | paddle.asin |
torch.atan | paddle.atan |
torch.arctan | paddle.atan |
torch.ceil | paddle.ceil |
torch.clip | paddle.clip |
torch.conj | paddle.conj |
torch.cos | paddle.cos |
torch.cosh | paddle.cosh |
torch.div | paddle.divide |
torch.divide | paddle.divide |
torch.erf | paddle.erf |
torch.exp | paddle.exp |
torch.floor | paddle.floor |
torch.floor_divide | paddle.floor_divide |
torch.fmod | paddle.mod |
torch.imag | paddle.imag |
torch.log | paddle.log |
torch.log10 | paddle.log10 |
torch.log1p | paddle.log1p |
torch.log2 | paddle.log2 |
torch.logical_and | paddle.logical_and |
torch.logical_not | paddle.logical_not |
torch.logical_or | paddle.logical_or |
torch.logical_xor | paddle.logical_xor |
torch.mul | paddle.multiply |
torch.multiply | paddle.multiply |
torch.pow | paddle.pow |
torch.real | paddle.real |
torch.reciprocal | paddle.reciprocal |
torch.remainder | paddle.mod |
torch.round | paddle.round |
torch.rsqrt | paddle.rsqrt |
torch.sigmoid | paddle.nn.functional.sigmoid |
torch.sign | paddle.sign |
torch.sin | paddle.sin |
torch.sinh | paddle.sinh |
torch.sqrt | paddle.sqrt |
torch.square | paddle.square |
torch.sub | paddle.subtract |
torch.subtract | paddle.subtract |
torch.tanh | paddle.tanh |
torch.true_divide | paddle.divide |
torch.argmax | paddle.argmax |
torch.argmin | paddle.argmin |
torch.max | paddle.max |
torch.min | paddle.min |
torch.dist | paddle.dist |
torch.logsumexp | paddle.logsumexp |
torch.mean | paddle.mean |
torch.median | paddle.median |
torch.norm | paddle.norm |
torch.prod | paddle.prod |
torch.std | paddle.std |
torch.sum | paddle.sum |
torch.unique | paddle.unique |
torch.var | paddle.var |
torch.allclose | paddle.allclose |
torch.argsort | paddle.argsort |
torch.eq | paddle.equal |
torch.equal | paddle.equal |
torch.ge | paddle.greater_equal |
torch.greater_equal | paddle.greater_equal |
torch.gt | paddle.greater_than |
torch.greater | paddle.greater_than |
torch.isfinite | paddle.isfinite |
torch.isinf | paddle.isinf |
torch.isnan | paddle.isnan |
torch.le | paddle.less_equal |
torch.less_equal | paddle.less_equal |
torch.lt | paddle.less_than |
torch.less | paddle.less_than |
torch.maximum | paddle.maximum |
torch.minimum | paddle.minimum |
torch.ne | paddle.not_equal |
torch.not_equal | paddle.not_equal |
torch.sort | paddle.sort |
torch.topk | paddle.topk |
torch.cross | paddle.cross |
torch.cumsum | paddle.cumsum |
torch.diag | paddle.diag |
torch.diag_embed | paddle.nn.functional.diag_embed |
torch.flatten | paddle.flatten |
torch.flip | paddle.flip |
torch.meshgrid | paddle.meshgrid |
torch.roll | paddle.roll |
torch.trace | paddle.trace |
torch.tril | paddle.tril |
torch.triu | paddle.triu |
torch.addmm | paddle.addmm |
torch.bmm | paddle.bmm |
torch.cholesky | paddle.cholesky |
torch.dot | paddle.dot |
torch.inverse | paddle.inverse |
torch.matmul | paddle.matmul |
torch.mm | paddle.matmul |
torch.mv | paddle.mv |
torch.nn.Sequential | paddle.nn.Sequential |
torch.nn.ModuleList | paddle.nn.LayerList |
torch.nn.ParameterList | paddle.nn.ParameterList |
torch.nn.Conv1d | paddle.nn.Conv1D |
torch.nn.Conv2d | paddle.nn.Conv2D |
torch.nn.Conv3d | paddle.nn.Conv3D |
torch.nn.ConvTranspose1d | paddle.nn.Conv1DTranspose |
torch.nn.ConvTranspose2d | paddle.nn.Conv2DTranspose |
torch.nn.ConvTranspose3d | paddle.nn.Conv3DTranspose |
torch.nn.MaxPool1d | paddle.nn.MaxPool1D |
torch.nn.MaxPool2d | paddle.nn.MaxPool2D |
torch.nn.MaxPool3d | paddle.nn.MaxPool3D |
torch.nn.AvgPool1d | paddle.nn.AvgPool1D |
torch.nn.AvgPool2d | paddle.nn.AvgPool2D |
torch.nn.AvgPool3d | paddle.nn.AvgPool3D |
torch.nn.AdaptiveMaxPool1d | paddle.nn.AdaptiveMaxPool1D |
torch.nn.AdaptiveMaxPool2d | paddle.nn.AdaptiveMaxPool2D |
torch.nn.AdaptiveMaxPool3d | paddle.nn.AdaptiveMaxPool3D |
torch.nn.AdaptiveAvgPool1d | paddle.nn.AdaptiveAvgPool1D |
torch.nn.AdaptiveAvgPool2d | paddle.nn.AdaptiveAvgPool2D |
torch.nn.AdaptiveAvgPool3d | paddle.nn.AdaptiveAvgPool3D |
torch.nn.ELU | paddle.nn.ELU |
torch.nn.Hardshrink | paddle.nn.Hardshrink |
torch.nn.Hardsigmoid | paddle.nn.Hardsigmoid |
torch.nn.Hardtanh | paddle.nn.Hardtanh |
torch.nn.Hardswish | paddle.nn.Hardswish |
torch.nn.LeakyReLU | paddle.nn.LeakyReLU |
torch.nn.LogSigmoid | paddle.nn.LogSigmoid |
torch.nn.MultiheadAttention | paddle.nn.MultiHeadAttention |
torch.nn.PReLU | paddle.nn.PReLU |
torch.nn.ReLU | paddle.nn.ReLU |
torch.nn.ReLU6 | paddle.nn.ReLU6 |
torch.nn.SELU | paddle.nn.SELU |
torch.nn.GELU | paddle.nn.GELU |
torch.nn.Sigmoid | paddle.nn.Sigmoid |
torch.nn.Softplus | paddle.nn.Softplus |
torch.nn.Softshrink | paddle.nn.Softshrink |
torch.nn.Softsign | paddle.nn.Softsign |
torch.nn.Tanh | paddle.nn.Tanh |
torch.nn.Tanhshrink | paddle.nn.Tanhshrink |
torch.nn.Threshold | paddle.nn.ThresholdedReLU |
torch.nn.Softmax | paddle.nn.Softmax |
torch.nn.LogSoftmax | paddle.nn.LogSoftmax |
torch.nn.BatchNorm1d | paddle.nn.BatchNorm1D |
torch.nn.BatchNorm2d | paddle.nn.BatchNorm2D |
torch.nn.BatchNorm3d | paddle.nn.BatchNorm3D |
torch.nn.GroupNorm | paddle.nn.GroupNorm |
torch.nn.SyncBatchNorm | paddle.nn.SyncBatchNorm |
torch.nn.InstanceNorm1d | paddle.nn.InstanceNorm1D |
torch.nn.InstanceNorm2d | paddle.nn.InstanceNorm2D |
torch.nn.InstanceNorm3d | paddle.nn.InstanceNorm3D |
torch.nn.LayerNorm | paddle.nn.LayerNorm |
torch.nn.LocalResponseNorm | paddle.nn.LocalResponseNorm |
torch.nn.RNNBase | paddle.nn.RNNCellBase |
torch.nn.RNN | paddle.nn.RNN |
torch.nn.LSTM | paddle.nn.LSTM |
torch.nn.GRU | paddle.nn.GRU |
torch.nn.RNNCell | paddle.nn.RNNCellBase |
torch.nn.LSTMCell | paddle.nn.LSTMCell |
torch.nn.GRUCell | paddle.nn.GRUCell |
torch.nn.Transformer | paddle.nn.Transformer |
torch.nn.TransformerEncoder | paddle.nn.TransformerEncoder |
torch.nn.TransformerDecoder | paddle.nn.TransformerDecoder |
torch.nn.TransformerEncoderLayer | paddle.nn.TransformerEncoderLayer |
torch.nn.TransformerDecoderLayer | paddle.nn.TransformerDecoderLayer |
torch.nn.Linear | paddle.nn.Linear |
torch.nn.Bilinear | paddle.nn.Bilinear |
torch.nn.Dropout | paddle.nn.Dropout |
torch.nn.Dropout2d | paddle.nn.Dropout2D |
torch.nn.Dropout3d | paddle.nn.Dropout3D |
torch.nn.AlphaDropout | paddle.nn.AlphaDropout |
torch.nn.Embedding | paddle.nn.Embedding |
torch.nn.CosineSimilarity | paddle.nn.CosineSimilarity |
torch.nn.PairwiseDistance | paddle.nn.PairwiseDistance |
torch.nn.L1Loss | paddle.nn.L1Loss |
torch.nn.MSELoss | paddle.nn.MSELoss |
torch.nn.CrossEntropyLoss | paddle.nn.CrossEntropyLoss |
torch.nn.CTCLoss | paddle.nn.CTCLoss |
torch.nn.NLLLoss | paddle.nn.NLLLoss |
torch.nn.KLDivLoss | paddle.nn.KLDivLoss |
torch.nn.BCELoss | paddle.nn.BCELoss |
torch.nn.BCEWithLogitsLoss | paddle.nn.BCEWithLogitsLoss |
torch.nn.MarginRankingLoss | paddle.nn.MarginRankingLoss |
torch.nn.SmoothL1Loss | paddle.nn.SmoothL1Loss |
torch.nn.PixelShuffle | paddle.nn.PixelShuffle |
torch.nn.Upsample | paddle.nn.Upsample |
torch.nn.UpsamplingNearest2d | paddle.nn.UpsamplingNearest2D |
torch.nn.UpsamplingBilinear2d | paddle.nn.UpsamplingBilinear2D |
torch.nn.DataParallel | paddle.DataParallel |
torch.weight_norm | paddle.nn.utils.weight_norm |
torch.remove_weight_norm | paddle.nn.utils.remove_weight_norm |
torch.spectral_norm | paddle.static.nn.spectral_norm |
torch.nn.Flatten | paddle.nn.Flatten |
torch.nn.functional.avg_pool1d | paddle.nn.functional.avg_pool1d |
torch.nn.functional.threshold | paddle.nn.functional.thresholded_relu |
torch.nn.functional.batch_norm | paddle.nn.functional.batch_norm |
torch.nn.functional.linear | paddle.nn.functional.linear |
torch.nn.functional.dropout | paddle.nn.functional.dropout |
torch.nn.functional.embedding | paddle.nn.functional.embedding |
torch.nn.functional.binary_cross_entropy | paddle.nn.functional.binary_cross_entropy |
torch.nn.functional.pixel_shuffle | paddle.nn.functional.pixel_shuffle |
torch.nn.functional.conv1d | paddle.nn.functional.conv1d |
torch.nn.functional.conv2d | paddle.nn.functional.conv2d |
torch.nn.functional.conv3d | paddle.nn.functional.conv3d |
torch.nn.functional.conv_transpose1d | paddle.nn.functional.conv1d_transpose |
torch.nn.functional.conv_transpose2d | paddle.nn.functional.conv2d_transpose |
torch.nn.functional.conv_transpose3d | paddle.nn.functional.conv3d_transpose |
torch.nn.functional.unfold | paddle.nn.functional.unfold |
torch.nn.functional.avg_pool2d | paddle.nn.functional.avg_pool2d |
torch.nn.functional.avg_pool3d | paddle.nn.functional.avg_pool3d |
torch.nn.functional.max_pool1d | paddle.nn.functional.max_pool1d |
torch.nn.functional.max_pool2d | paddle.nn.functional.max_pool2d |
torch.nn.functional.max_pool3d | paddle.nn.functional.max_pool3d |
torch.nn.functional.adaptive_max_pool1d | paddle.nn.functional.adaptive_max_pool1d |
torch.nn.functional.adaptive_max_pool2d | paddle.nn.functional.adaptive_max_pool2d |
torch.nn.functional.adaptive_max_pool3d | paddle.nn.functional.adaptive_max_pool3d |
torch.nn.functional.adaptive_avg_pool1d | paddle.nn.functional.adaptive_avg_pool1d |
torch.nn.functional.adaptive_avg_pool2d | paddle.nn.functional.adaptive_avg_pool2d |
torch.nn.functional.adaptive_avg_pool3d | paddle.nn.functional.adaptive_avg_pool3d |
torch.nn.functional.relu | paddle.nn.functional.relu |
torch.nn.functional.hardtanh | paddle.nn.functional.hardtanh |
torch.nn.functional.hardswish | paddle.nn.functional.hardswish |
torch.nn.functional.relu6 | paddle.nn.functional.relu6 |
torch.nn.functional.elu | paddle.nn.functional.elu |
torch.nn.functional.selu | paddle.nn.functional.selu |
torch.nn.functional.leaky_relu | paddle.nn.functional.leaky_relu |
torch.nn.functional.prelu | paddle.nn.functional.prelu |
torch.nn.functional.gelu | paddle.nn.functional.gelu |
torch.nn.functional.logsigmoid | paddle.nn.functional.log_sigmoid |
torch.nn.functional.hardshrink | paddle.nn.functional.hardshrink |
torch.nn.functional.tanhshrink | paddle.nn.functional.tanhshrink |
torch.nn.functional.softsign | paddle.nn.functional.softsign |
torch.nn.functional.softplus | paddle.nn.functional.softplus |
torch.nn.functional.softmax | paddle.nn.functional.softmax |
torch.nn.functional.softshrink | paddle.nn.functional.softshrink |
torch.nn.functional.log_softmax | paddle.nn.functional.log_softmax |
torch.nn.functional.tanh | paddle.tanh |
torch.nn.functional.sigmoid | paddle.nn.functional.sigmoid |
torch.nn.functional.hardsigmoid | paddle.nn.functional.hardsigmoid |
torch.nn.functional.relu_ | paddle.nn.functional.relu_ |
torch.nn.functional.elu_ | paddle.nn.functional.elu_ |
torch.nn.functional.instance_norm | paddle.nn.functional.instance_norm |
torch.nn.functional.layer_norm | paddle.nn.functional.layer_norm |
torch.nn.functional.local_response_norm | paddle.nn.functional.local_response_norm |
torch.nn.functional.normalize | paddle.nn.functional.normalize |
torch.nn.functional.bilinear | paddle.nn.functional.bilinear |
torch.nn.functional.alpha_dropout | paddle.nn.functional.alpha_dropout |
torch.nn.functional.dropout2d | paddle.nn.functional.dropout2d |
torch.nn.functional.dropout3d | paddle.nn.functional.dropout3d |
torch.nn.functional.one_hot | paddle.nn.functional.one_hot |
torch.nn.functional.cosine_similarity | paddle.nn.functional.cosine_similarity |
torch.nn.functional.binary_cross_entropy_with_logits | paddle.nn.functional.binary_cross_entropy_with_logits |
torch.nn.functional.cross_entropy | paddle.nn.functional.cross_entropy |
torch.nn.functional.ctc_loss | paddle.nn.functional.ctc_loss |
torch.nn.functional.kl_div | paddle.nn.functional.kl_div |
torch.nn.functional.l1_loss | paddle.nn.functional.l1_loss |
torch.nn.functional.mse_loss | paddle.nn.functional.mse_loss |
torch.nn.functional.margin_ranking_loss | paddle.nn.functional.margin_ranking_loss |
torch.nn.functional.nll_loss | paddle.nn.functional.nll_loss |
torch.nn.functional.smooth_l1_loss | paddle.nn.functional.smooth_l1_loss |
torch.nn.functional.pad | paddle.nn.functional.pad |
torch.nn.functional.interpolate | paddle.nn.functional.interpolate |
torch.nn.functional.upsample | paddle.nn.functional.upsample |
torch.nn.functional.grid_sample | paddle.nn.functional.grid_sample |
torch.nn.functional.affine_grid | paddle.nn.functional.affine_grid |
torch.random.get_rng_state() | paddle.get_cuda_rng_state |
torch.random.manual_seed(seed) | paddle.seed |
torch.random.seed() | paddle.seed |
torch.random.set_rng_state(new_state) | paddle.set_cuda_rng_state |
torch.optim.Optimizer | paddle.optimizer.Optimizer |
torch.optim.Adadelta | paddle.optimizer.Adadelta |
torch.optim.Adagrad | paddle.optimizer.Adagrad |
torch.optim.Adam | paddle.optimizer.Adam |
torch.optim.AdamW | paddle.optimizer.AdamW |
torch.optim.Adamax | paddle.optimizer.Adamax |
torch.optim.RMSprop | paddle.optimizer.RMSProp |
torch.optim.SGD | paddle.optimizer.SGD |
torch.optim.lr_scheduler.LambdaLR | paddle.optimizer.lr.LambdaDecay |
torch.optim.lr_scheduler.StepLR | paddle.optimizer.lr.StepDecay |
torch.optim.lr_scheduler.MultiStepLR | paddle.optimizer.lr.MultiStepDecay |
torch.optim.lr_scheduler.ExponentialLR | paddle.optimizer.lr.ExponentialDecay |
torch.optim.lr_scheduler.CosineAnnealingLR | paddle.optimizer.lr.CosineAnnealingDecay |
torch.optim.lr_scheduler.ReduceLROnPlateau | paddle.optimizer.lr.ReduceOnPlateau |
torch.nn.Module | paddle.nn.Layer |
更多推荐
已为社区贡献1条内容
所有评论(0)