This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def myfunc(a: int = 1, b: str = "", c: list = []): | |
pass |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def myfunc(a, b, c):
    """Placeholder accepting three arguments, positionally or by keyword."""
    pass


# A dict whose keys match the callee's parameter names can be passed
# as keyword arguments with the ``**`` unpacking operator.
config = dict(a=1, b=2, c=5)
myfunc(**config)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def myfunc(a, b, c, d, e, f):
    """Placeholder taking six required arguments, all passable by keyword."""
    # Do stuff
    pass


# BUG FIX: the original call omitted the required argument ``e``, which
# raises ``TypeError: myfunc() missing 1 required positional argument``.
# All six parameters must be supplied.
myfunc(a=1, b="foo", c=42, d=100, e=0, f="bar")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
class EmptyClass:
    """A class that defines no attributes or methods of its own."""
def some_function():
    """Do nothing; a placeholder for a future implementation."""
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Demonstrates Python's ``for``/``else``: the ``else`` suite runs only
# when the loop finishes without hitting ``break``.
# ``n`` and ``condition_is_met`` are assumed to be defined elsewhere.
for i in range(n):
    # Do stuff...
    if condition_is_met:
        break
else:
    # This will be executed only if break is never reached.
    # BUG FIX: a suite may not consist solely of a comment — the original
    # ``else`` block had no statement, which is a SyntaxError.
    pass
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Scan the range and exit early at the first iteration where the
# condition holds. ``n`` and ``condition_is_met`` are assumed to be
# defined elsewhere — TODO confirm against the surrounding code.
for i in range(n):
    # Do stuff...
    if condition_is_met:
        break
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Skip to the next loop iteration whenever pulling an item from the
# iterator fails; otherwise fall through and process the item.
for i in range(n):
    try:
        data = next(dataiter)
    # NOTE(review): ``next()`` on an exhausted iterator raises
    # StopIteration, not ValueError — confirm which exception
    # ``dataiter`` actually raises here.
    except ValueError:
        continue
    # The code here is ignored if ValueError is raised in the "try" block
    # Otherwise, run the rest of the loop here ...
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# End-of-epoch bookkeeping: reduce the per-batch metrics accumulated in
# ``batch_history`` to one mean value per epoch.
# NOTE(review): raises ZeroDivisionError if ``batch_history`` is empty
# (i.e. the epoch processed no batches) — confirm that cannot happen.
epoch_history["loss"].append(sum(batch_history["loss"]) / len(batch_history["loss"]))
epoch_history["accuracy"].append(sum(batch_history["accuracy"]) / len(batch_history["accuracy"]))
# Switch to evaluation mode before the validation pass (disables
# dropout and freezes batch-norm statistics).
model.eval()
print("Validation...")
val_loss, val_accuracy = validate(model, valid_loader)
epoch_history["val_loss"].append(val_loss)
epoch_history["val_accuracy"].append(val_accuracy)
print(f"{epoch_history=}")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from tqdm import tqdm, trange
# Outer loop: one iteration per training epoch, wrapped in a progress bar.
for i in trange(epochs, unit="epoch", desc="Train"):
    model.train()
    # Inner loop: one iteration per mini-batch, with its own progress bar.
    with tqdm(train_loader, desc="Train") as tbatch:
        # NOTE(review): this loop variable ``i`` shadows the epoch index
        # ``i`` from the outer loop — rename one (e.g. ``batch_idx``).
        for i, (samples, targets) in enumerate(tbatch):
            # NOTE(review): ``model.train()`` is already called once per
            # epoch above; this per-batch call is redundant.
            model.train()
            samples = samples.to(device).long()
            targets = targets.to(device)
            # Clear gradients accumulated from the previous step.
            model.zero_grad()
            # NOTE(review): the transpose suggests the model expects
            # (seq_len, batch)-shaped input — confirm against the
            # BiLSTM definition.
            predictions, _ = model(samples.transpose(0, 1))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Build the model and move it (and the loss) to the best available device.
model = BiLSTM(input_dim, embedding_dim, hidden_dim)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# NOTE(review): BCELoss expects sigmoid-activated outputs in [0, 1] —
# confirm the model's final layer applies a sigmoid.
criterion = nn.BCELoss().to(device)
# Adam with its default hyperparameters (lr=1e-3).
optimizer = torch.optim.Adam(model.parameters())
model.to(device)
# Per-batch metrics, accumulated within each epoch.
batch_history = {
    "loss": [],
    "accuracy": []
}
epoch_history = { |
Newer | Older