diff --git a/examples/pytorch/bgnn/BGNN.py b/examples/pytorch/bgnn/BGNN.py
index 16f2dd21432d..27643d489f38 100644
--- a/examples/pytorch/bgnn/BGNN.py
+++ b/examples/pytorch/bgnn/BGNN.py
@@ -209,7 +209,7 @@ def train_model(self, model_in, target_labels, train_mask, optimizer):
         elif self.task == "classification":
             loss = F.cross_entropy(pred, y.long())
         else:
-            raise NotImplemented(
+            raise NotImplementedError(
                 "Unknown task. Supported tasks: classification, regression."
             )
diff --git a/examples/pytorch/evolveGCN/train.py b/examples/pytorch/evolveGCN/train.py
index 92484a7a1943..d588841d906a 100644
--- a/examples/pytorch/evolveGCN/train.py
+++ b/examples/pytorch/evolveGCN/train.py
@@ -41,7 +41,7 @@ def train(args, device):
             in_feats=int(g.ndata["feat"].shape[1]), num_layers=args.n_layers
         )
     else:
-        return NotImplementedError("Unsupported model {}".format(args.model))
+        raise NotImplementedError("Unsupported model {}".format(args.model))
 
     model = model.to(device)
     optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
diff --git a/examples/pytorch/han/utils.py b/examples/pytorch/han/utils.py
index 0b6b6054b25f..250a6abd3c91 100644
--- a/examples/pytorch/han/utils.py
+++ b/examples/pytorch/han/utils.py
@@ -263,7 +263,7 @@ def load_data(dataset, remove_self_loop=False):
     elif dataset == "ACMRaw":
         return load_acm_raw(remove_self_loop)
     else:
-        return NotImplementedError("Unsupported dataset {}".format(dataset))
+        raise NotImplementedError("Unsupported dataset {}".format(dataset))
 
 
 class EarlyStopping(object):
diff --git a/examples/pytorch/jtnn/jtnn/chemutils.py b/examples/pytorch/jtnn/jtnn/chemutils.py
index 88ab8c21850f..50d797f5f472 100644
--- a/examples/pytorch/jtnn/jtnn/chemutils.py
+++ b/examples/pytorch/jtnn/jtnn/chemutils.py
@@ -314,7 +314,11 @@ def enum_attach_nx(ctr_mol, nei_node, amap, singletons):
 
 
 # Try rings first: Speed-Up
-def enum_assemble_nx(node, neighbors, prev_nodes=[], prev_amap=[]):
+def enum_assemble_nx(node, neighbors, prev_nodes=None, prev_amap=None):
+    if prev_nodes is None:
+        prev_nodes = []
+    if prev_amap is None:
+        prev_amap = []
     all_attach_confs = []
     singletons = [
         nei_node["nid"]
diff --git a/examples/pytorch/jtnn/jtnn/jtnn_dec.py b/examples/pytorch/jtnn/jtnn/jtnn_dec.py
index 489ff71035c9..ee99782d12a8 100644
--- a/examples/pytorch/jtnn/jtnn/jtnn_dec.py
+++ b/examples/pytorch/jtnn/jtnn/jtnn_dec.py
@@ -80,7 +80,9 @@ def can_assemble(mol_tree, u, v_node_dict):
     return len(cands) > 0
 
 
-def create_node_dict(smiles, clique=[]):
+def create_node_dict(smiles, clique=None):
+    if clique is None:
+        clique = []
     return dict(
         smiles=smiles,
         mol=get_mol(smiles),