Init code
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -174,3 +174,6 @@ cython_debug/
|
|||||||
# PyPI configuration file
|
# PyPI configuration file
|
||||||
.pypirc
|
.pypirc
|
||||||
|
|
||||||
|
|
||||||
|
source/
|
||||||
|
output/
|
||||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
|||||||
|
3.12
|
||||||
173
main.py
Normal file
173
main.py
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import torch
|
||||||
|
import numpy as np
|
||||||
|
from omegaconf import OmegaConf, DictConfig
|
||||||
|
|
||||||
|
from src.utils import utils
|
||||||
|
from src.utils.torch import img2tensor, check_dim_and_resize, tensor2img
|
||||||
|
from src.utils.build import build_from_cfg
|
||||||
|
from src.utils.padder import InputPadder
|
||||||
|
|
||||||
|
|
||||||
|
logging.basicConfig(
|
||||||
|
level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Anchor:
    """A VRAM reference point: a known resolution paired with its memory cost.

    Used by ImageInterpolator.scale() to extrapolate how much an input image
    must be downscaled to fit the available VRAM.
    """

    def __init__(self, resolution: int, memory: int, memory_bias: int) -> None:
        # resolution: pixel count (height * width) of the reference workload.
        # memory: VRAM consumed at that resolution, in bytes (see main()).
        # memory_bias: fixed VRAM overhead independent of resolution, in bytes.
        self.resolution = resolution
        self.memory = memory
        self.memory_bias = memory_bias

    def __str__(self) -> str:
        """Human-readable form, used in the ImageInterpolator startup log."""
        return (
            f"Anchor(resolution={self.resolution}, "
            f"memory={self.memory}, "
            f"memory_bias={self.memory_bias})"
        )
||||||
|
|
||||||
|
class ModelRunner:
    """Builds a network from a YAML config, loads its checkpoint, and holds the
    resulting eval-mode model ready for inference."""

    def __init__(self, config: Path, ckpt_path: Path, device: torch.device) -> None:
        """Initializes the ModelRunner with configuration and checkpoint.

        Args:
            config (Path): Path to model configuration in YAML format
            ckpt_path (Path): Path to model checkpoint in .pth format
            device (torch.device): Device to load the model on
        """
        omega_config = OmegaConf.load(config)
        network_config: DictConfig = omega_config.network
        logging.info(
            f"Loaded network configuration: {network_config} from [{ckpt_path}]"
        )
        model = build_from_cfg(network_config)
        # NOTE(review): weights_only=False deserializes arbitrary objects from
        # the checkpoint — only load checkpoints from a trusted source.
        checkpoint = torch.load(ckpt_path, map_location=device, weights_only=False)
        model.load_state_dict(checkpoint["state_dict"])
        # Fix: honor the `device` argument instead of re-detecting the device
        # via get_device(). The original call could disagree with the
        # map_location used above if the caller passed a different device.
        model = model.to(device)
        model.eval()
        self.model = model
|
||||||
|
|
||||||
|
def get_vram_available(device: torch.device) -> int:
    """Returns the available VRAM in bytes."""
    if device.type == "cuda" and torch.cuda.is_available():
        # Free VRAM = total device memory minus what this process has allocated.
        total = torch.cuda.get_device_properties(device).total_memory
        in_use = torch.cuda.memory_allocated(device)
        return total - in_use
    if device.type == "mps" and torch.mps.is_available():
        # MPS does not provide a way to query available memory, so we return a large number to avoid issues
        return torch.mps.recommended_max_memory()
    # CPU or unrecognized device: a tiny positive sentinel keeps downstream
    # VRAM-based scaling arithmetic well-defined.
    return 1
|
||||||
|
|
||||||
|
def get_device():
|
||||||
|
"""Detects and returns the best available device for PyTorch computation.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
torch.device: CUDA device if available, MPS device for Apple Silicon if available, otherwise CPU.
|
||||||
|
"""
|
||||||
|
if torch.cuda.is_available():
|
||||||
|
logging.info("Using CUDA-enabled GPU")
|
||||||
|
return torch.device("cuda")
|
||||||
|
elif torch.mps.is_available():
|
||||||
|
logging.info("Using Apple Silicon GPU (MPS)")
|
||||||
|
return torch.device("mps")
|
||||||
|
logging.info("No GPU available, using CPU")
|
||||||
|
return torch.device("cpu")
|
||||||
|
|
||||||
|
|
||||||
|
class ImageInterpolator:
    """Interpolates a middle frame between two images with a pretrained model,
    downscaling inputs when available VRAM is insufficient."""

    def __init__(self, device: torch.device, anchor: Anchor, model_runner: ModelRunner):
        """Store device/anchor/model and precompute the time embedding.

        Args:
            device (torch.device): Device used for all tensor work.
            anchor (Anchor): VRAM reference point used by scale().
            model_runner (ModelRunner): Holds the loaded, eval-mode model.
        """
        self.device = device
        self.anchor = anchor
        self.vram_available = get_vram_available(device)
        # Fixed interpolation time t = 0.5 (exact midpoint), shaped (1,1,1,1)
        # to broadcast like a batch of per-image scalars.
        self.embt = torch.tensor(1 / 2).float().view(1, 1, 1, 1).to(device)
        self.model_runner = model_runner
        logging.info(
            f"Initialized ImageInterpolator with device: {device}, anchor: {anchor}, available VRAM: {self.vram_available} bytes"
        )

    def interpolate(self, image1: Path, image2: Path, output_path: Path) -> None:
        """Read two images, predict the intermediate frame, and write it out.

        Args:
            image1 (Path): First input image.
            image2 (Path): Second input image.
            output_path (Path): Destination for the interpolated image.
        """
        logging.info(f"Reading images: {image1} and {image2}")
        tensor1 = img2tensor(utils.read(image1)).to(self.device)
        tensor2 = img2tensor(utils.read(image2)).to(self.device)
        logging.info(
            f"Image shapes after conversion to tensors: {tensor1.shape}, {tensor2.shape}"
        )
        # Ensure both tensors share compatible dimensions before inference.
        tensor1, tensor2 = check_dim_and_resize(tensor1, tensor2)
        logging.info(f"Image shapes after resizing: {tensor1.shape}, {tensor2.shape}")
        # assumes NCHW layout: dims 2/3 are height/width — TODO confirm against img2tensor
        h, w = tensor1.shape[2], tensor1.shape[3]
        logging.info(f"Interpolating images of size: {h}x{w}")

        scale = self.scale(h, w)
        logging.info(f"Calculated scale factor: {scale:.2f}")
        # Pad to a multiple that remains divisible by 16 after downscaling.
        padding = int(16 / scale)
        logging.info(f"Calculated padding: {padding} pixels")
        padder = InputPadder(tensor1.shape, divisor=padding)
        tensor1_padded, tensor2_padded = padder.pad(tensor1, tensor2)
        logging.info(
            f"Image shapes after padding: {tensor1_padded.shape}, {tensor2_padded.shape}"
        )

        tensor1_padded = tensor1_padded.to(self.device)
        tensor2_padded = tensor2_padded.to(self.device)
        logging.info("Running model inference for interpolation")
        # Inference only: disable autograd to save memory and time.
        with torch.no_grad():
            interpolated = self.model_runner.model(
                tensor1_padded, tensor2_padded, self.embt, scale_factor=scale, eval=True
            )["imgt_pred"]
        logging.info(f"Interpolated image shape before unpadding: {interpolated.shape}")
        (interpolated,) = padder.unpad(interpolated)
        logging.info(f"Interpolated image shape after unpadding: {interpolated.shape}")
        utils.write(output_path, tensor2img(interpolated.cpu()))
        logging.info(f"Saved interpolated image to: {output_path}")

    def scale(self, height: int, width: int) -> float:
        """Compute the downscale factor (<= 1.0) that fits the job into VRAM.

        Extrapolates linearly from the anchor: a larger image or less free
        VRAM yields a smaller factor. The result is quantized so that
        16 / scale is an integer (see the padding computation above).
        """
        scale = (
            self.anchor.resolution
            / (height * width)
            * np.sqrt(
                (self.vram_available - self.anchor.memory_bias) / self.anchor.memory
            )
        )
        # Never upscale beyond the native resolution.
        scale = 1 if scale > 1 else scale
        # Quantize: scale becomes 16 / floor(16 / sqrt(scale)), i.e. 16/k for
        # integer k. NOTE(review): the sqrt here re-expands the factor — verify
        # this matches the intended VRAM model before changing it.
        scale = 1 / np.floor(1 / np.sqrt(scale) * 16) * 16
        if scale < 1:
            logging.info(
                f"Due to the limited VRAM, the video will be scaled by {scale:.2f}"
            )
        return scale
|
|
||||||
|
|
||||||
|
def main():
    """Script entry point: interpolate one frame between two fixed input images."""
    # Hard-coded paths for model assets and the input/output images.
    config_path = Path("src/config/AMT-G.yaml")
    ckpt_path = Path("src/pretrained/amt-g.pth")
    image1_path = Path("source/img0.png")
    image2_path = Path("source/img1.png")
    output_path = Path("output/interpolated_image.png")

    device = get_device()
    model_runner = ModelRunner(config_path, ckpt_path, device)
    if device.type in ("cpu", "mps"):
        slow_device_warnings = {
            "mps": "Running on Apple Silicon GPU (MPS) may have limited performance. Consider using a CUDA-enabled GPU for better performance.",
            "cpu": "Running on CPU may be very slow. Consider using a GPU for better performance.",
        }
        logging.warning(slow_device_warnings[device.type])
        # Huge anchor resolution with unit memory cost: effectively disables
        # VRAM-based downscaling on CPU/MPS.
        anchor = Anchor(resolution=8192 * 8192, memory=1, memory_bias=0)
    elif device.type == "cuda":
        # Empirical CUDA anchor: 1024x512 pixels cost ~1500 MiB plus a
        # ~2500 MiB fixed overhead.
        anchor = Anchor(
            resolution=1024 * 512, memory=1500 * 1024**2, memory_bias=2500 * 1024**2
        )
    else:
        raise Exception(f"Unsupported device type: {device.type}")
    interpolator = ImageInterpolator(device, anchor, model_runner)
    interpolator.interpolate(image1_path, image2_path, output_path)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Run only when executed as a script; importing this module has no side effects.
    main()
||||||
187
networks/AMT-G.py
Executable file
187
networks/AMT-G.py
Executable file
@@ -0,0 +1,187 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn as nn
|
||||||
|
from networks.blocks.raft import coords_grid, BasicUpdateBlock, BidirCorrBlock
|
||||||
|
from networks.blocks.feat_enc import LargeEncoder
|
||||||
|
from networks.blocks.ifrnet import resize, Encoder, InitDecoder, IntermediateDecoder
|
||||||
|
from networks.blocks.multi_flow import multi_flow_combine, MultiFlowDecoder
|
||||||
|
|
||||||
|
|
||||||
|
class Model(nn.Module):
    """AMT-G frame-interpolation network.

    Coarse-to-fine architecture: a feature encoder plus four decoders
    (1/16 -> 1/2 scale) that progressively refine bidirectional flows via
    correlation lookups, then combine multiple flow candidates into the
    predicted intermediate frame.
    """

    def __init__(
        self,
        corr_radius=3,
        corr_lvls=4,
        num_flows=5,
        channels=[84, 96, 112, 128],
        skip_channels=84,
    ):
        """Build encoders, decoders, update blocks, and the combine head.

        Args:
            corr_radius: lookup radius in the correlation volume.
            corr_lvls: number of correlation pyramid levels.
            num_flows: flow candidates predicted by the final decoder.
            channels: feature widths at the four pyramid levels (shared
                default list — never mutated, so the mutable default is benign).
            skip_channels: width of the skip connections between decoders.
        """
        super(Model, self).__init__()
        self.radius = corr_radius
        self.corr_levels = corr_lvls
        self.num_flows = num_flows

        self.feat_encoder = LargeEncoder(
            output_dim=128, norm_fn="instance", dropout=0.0
        )
        self.encoder = Encoder(channels, large=True)
        self.decoder4 = InitDecoder(channels[3], channels[2], skip_channels)
        self.decoder3 = IntermediateDecoder(channels[2], channels[1], skip_channels)
        self.decoder2 = IntermediateDecoder(channels[1], channels[0], skip_channels)
        self.decoder1 = MultiFlowDecoder(channels[0], skip_channels, num_flows)

        # Low-resolution update blocks (operate at the correlation scale).
        self.update4 = self._get_updateblock(112, None)
        self.update3_low = self._get_updateblock(96, 2.0)
        self.update2_low = self._get_updateblock(84, 4.0)

        # High-resolution update blocks (after correlation is upsampled).
        self.update3_high = self._get_updateblock(96, None)
        self.update2_high = self._get_updateblock(84, None)

        self.comb_block = nn.Sequential(
            nn.Conv2d(3 * self.num_flows, 6 * self.num_flows, 7, 1, 3),
            nn.PReLU(6 * self.num_flows),
            nn.Conv2d(6 * self.num_flows, 3, 7, 1, 3),
        )

    def _get_updateblock(self, cdim, scale_factor=None):
        """Build a BasicUpdateBlock refining features/flows at one level.

        Args:
            cdim: channel width of the feature map being refined.
            scale_factor: optional upsampling factor applied inside the block.
        """
        return BasicUpdateBlock(
            cdim=cdim,
            hidden_dim=192,
            flow_dim=64,
            corr_dim=256,
            corr_dim2=192,
            fc_dim=188,
            scale_factor=scale_factor,
            corr_levels=self.corr_levels,
            radius=self.radius,
        )

    def _corr_scale_lookup(self, corr_fn, coord, flow0, flow1, embt, downsample=1):
        """Look up bidirectional correlation at flow-displaced coordinates.

        Returns:
            (corr, flow): concatenated correlation features and the (possibly
            downsampled) concatenated flows used for the lookup.
        """
        # convert t -> 0 to 0 -> 1 | convert t -> 1 to 1 -> 0
        # based on linear assumption
        t1_scale = 1.0 / embt
        t0_scale = 1.0 / (1.0 - embt)
        if downsample != 1:
            inv = 1 / downsample
            flow0 = inv * resize(flow0, scale_factor=inv)
            flow1 = inv * resize(flow1, scale_factor=inv)

        corr0, corr1 = corr_fn(coord + flow1 * t1_scale, coord + flow0 * t0_scale)
        corr = torch.cat([corr0, corr1], dim=1)
        flow = torch.cat([flow0, flow1], dim=1)
        return corr, flow

    def forward(self, img0, img1, embt, scale_factor=1.0, eval=False, **kwargs):
        """Predict the frame at time `embt` between img0 and img1.

        Args:
            img0, img1: input frames (NCHW).
            embt: interpolation time embedding, broadcastable to (N,1,1,1).
            scale_factor: optional internal downscale for memory savings.
            eval: if True, return only the predicted frame (note: shadows the
                builtin, but the name is part of the public interface).

        Returns:
            dict with 'imgt_pred', plus flow/feature predictions when not eval.
        """
        # Remove the shared mean so the network works on zero-centered input.
        mean_ = (
            torch.cat([img0, img1], 2)
            .mean(1, keepdim=True)
            .mean(2, keepdim=True)
            .mean(3, keepdim=True)
        )
        img0 = img0 - mean_
        img1 = img1 - mean_
        img0_ = resize(img0, scale_factor) if scale_factor != 1.0 else img0
        img1_ = resize(img1, scale_factor) if scale_factor != 1.0 else img1
        b, _, h, w = img0_.shape
        coord = coords_grid(b, h // 8, w // 8, img0.device)

        fmap0, fmap1 = self.feat_encoder([img0_, img1_])  # [1, 128, H//8, W//8]
        corr_fn = BidirCorrBlock(
            fmap0, fmap1, radius=self.radius, num_levels=self.corr_levels
        )

        # f0_1: [1, c0, H//2, W//2] | f0_2: [1, c1, H//4, W//4]
        # f0_3: [1, c2, H//8, W//8] | f0_4: [1, c3, H//16, W//16]
        f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_)
        f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_)

        ######################################### the 4th decoder #########################################
        up_flow0_4, up_flow1_4, ft_3_ = self.decoder4(f0_4, f1_4, embt)
        corr_4, flow_4 = self._corr_scale_lookup(
            corr_fn, coord, up_flow0_4, up_flow1_4, embt, downsample=1
        )

        # residue update with lookup corr
        delta_ft_3_, delta_flow_4 = self.update4(ft_3_, flow_4, corr_4)
        delta_flow0_4, delta_flow1_4 = torch.chunk(delta_flow_4, 2, 1)
        up_flow0_4 = up_flow0_4 + delta_flow0_4
        up_flow1_4 = up_flow1_4 + delta_flow1_4
        ft_3_ = ft_3_ + delta_ft_3_

        ######################################### the 3rd decoder #########################################
        up_flow0_3, up_flow1_3, ft_2_ = self.decoder3(
            ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4
        )
        corr_3, flow_3 = self._corr_scale_lookup(
            corr_fn, coord, up_flow0_3, up_flow1_3, embt, downsample=2
        )

        # residue update with lookup corr
        delta_ft_2_, delta_flow_3 = self.update3_low(ft_2_, flow_3, corr_3)
        delta_flow0_3, delta_flow1_3 = torch.chunk(delta_flow_3, 2, 1)
        up_flow0_3 = up_flow0_3 + delta_flow0_3
        up_flow1_3 = up_flow1_3 + delta_flow1_3
        ft_2_ = ft_2_ + delta_ft_2_

        # residue update with lookup corr (hr)
        corr_3 = resize(corr_3, scale_factor=2.0)
        up_flow_3 = torch.cat([up_flow0_3, up_flow1_3], dim=1)
        delta_ft_2_, delta_up_flow_3 = self.update3_high(ft_2_, up_flow_3, corr_3)
        ft_2_ += delta_ft_2_
        up_flow0_3 += delta_up_flow_3[:, 0:2]
        up_flow1_3 += delta_up_flow_3[:, 2:4]

        ######################################### the 2nd decoder #########################################
        up_flow0_2, up_flow1_2, ft_1_ = self.decoder2(
            ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3
        )
        corr_2, flow_2 = self._corr_scale_lookup(
            corr_fn, coord, up_flow0_2, up_flow1_2, embt, downsample=4
        )

        # residue update with lookup corr
        delta_ft_1_, delta_flow_2 = self.update2_low(ft_1_, flow_2, corr_2)
        delta_flow0_2, delta_flow1_2 = torch.chunk(delta_flow_2, 2, 1)
        up_flow0_2 = up_flow0_2 + delta_flow0_2
        up_flow1_2 = up_flow1_2 + delta_flow1_2
        ft_1_ = ft_1_ + delta_ft_1_

        # residue update with lookup corr (hr)
        corr_2 = resize(corr_2, scale_factor=4.0)
        up_flow_2 = torch.cat([up_flow0_2, up_flow1_2], dim=1)
        delta_ft_1_, delta_up_flow_2 = self.update2_high(ft_1_, up_flow_2, corr_2)
        ft_1_ += delta_ft_1_
        up_flow0_2 += delta_up_flow_2[:, 0:2]
        up_flow1_2 += delta_up_flow_2[:, 2:4]

        ######################################### the 1st decoder #########################################
        up_flow0_1, up_flow1_1, mask, img_res = self.decoder1(
            ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2
        )

        if scale_factor != 1.0:
            # Flows were estimated at the downscaled resolution: resize back
            # and rescale their magnitudes by the same factor.
            up_flow0_1 = resize(up_flow0_1, scale_factor=(1.0 / scale_factor)) * (
                1.0 / scale_factor
            )
            up_flow1_1 = resize(up_flow1_1, scale_factor=(1.0 / scale_factor)) * (
                1.0 / scale_factor
            )
            mask = resize(mask, scale_factor=(1.0 / scale_factor))
            img_res = resize(img_res, scale_factor=(1.0 / scale_factor))

        # Merge multiple predictions
        imgt_pred = multi_flow_combine(
            self.comb_block, img0, img1, up_flow0_1, up_flow1_1, mask, img_res, mean_
        )
        imgt_pred = torch.clamp(imgt_pred, 0, 1)

        if eval:
            return {"imgt_pred": imgt_pred}
        else:
            up_flow0_1 = up_flow0_1.reshape(b, self.num_flows, 2, h, w)
            up_flow1_1 = up_flow1_1.reshape(b, self.num_flows, 2, h, w)
            return {
                "imgt_pred": imgt_pred,
                "flow0_pred": [up_flow0_1, up_flow0_2, up_flow0_3, up_flow0_4],
                "flow1_pred": [up_flow1_1, up_flow1_2, up_flow1_3, up_flow1_4],
                "ft_pred": [ft_1_, ft_2_, ft_3_],
            }
||||||
155
networks/AMT-L.py
Executable file
155
networks/AMT-L.py
Executable file
@@ -0,0 +1,155 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn as nn
|
||||||
|
from networks.blocks.raft import (
|
||||||
|
coords_grid,
|
||||||
|
BasicUpdateBlock, BidirCorrBlock
|
||||||
|
)
|
||||||
|
from networks.blocks.feat_enc import (
|
||||||
|
BasicEncoder
|
||||||
|
)
|
||||||
|
from networks.blocks.ifrnet import (
|
||||||
|
resize,
|
||||||
|
Encoder,
|
||||||
|
InitDecoder,
|
||||||
|
IntermediateDecoder
|
||||||
|
)
|
||||||
|
from networks.blocks.multi_flow import (
|
||||||
|
multi_flow_combine,
|
||||||
|
MultiFlowDecoder
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Model(nn.Module):
    """AMT-L frame-interpolation network.

    Same coarse-to-fine design as AMT-G but with a lighter encoder and a
    single (low-resolution) update block per level.
    """

    def __init__(self,
                 corr_radius=3,
                 corr_lvls=4,
                 num_flows=5,
                 channels=[48, 64, 72, 128],
                 skip_channels=48
                 ):
        """Build encoders, decoders, update blocks, and the combine head.

        Args:
            corr_radius: lookup radius in the correlation volume.
            corr_lvls: number of correlation pyramid levels.
            num_flows: flow candidates predicted by the final decoder.
            channels: feature widths at the four pyramid levels.
            skip_channels: width of the skip connections between decoders.
        """
        super(Model, self).__init__()
        self.radius = corr_radius
        self.corr_levels = corr_lvls
        self.num_flows = num_flows

        self.feat_encoder = BasicEncoder(output_dim=128, norm_fn='instance', dropout=0.)
        # Fix: pass the `channels` argument instead of a hard-coded copy of its
        # default value, so non-default widths actually take effect (this
        # matches how the AMT-G variant constructs its Encoder).
        self.encoder = Encoder(channels, large=True)

        self.decoder4 = InitDecoder(channels[3], channels[2], skip_channels)
        self.decoder3 = IntermediateDecoder(channels[2], channels[1], skip_channels)
        self.decoder2 = IntermediateDecoder(channels[1], channels[0], skip_channels)
        self.decoder1 = MultiFlowDecoder(channels[0], skip_channels, num_flows)

        # Consistency fix: derive update-block widths from `channels` rather
        # than duplicating the default values (72/64/48); identical behavior
        # for the default configuration.
        self.update4 = self._get_updateblock(channels[2], None)
        self.update3 = self._get_updateblock(channels[1], 2.0)
        self.update2 = self._get_updateblock(channels[0], 4.0)

        self.comb_block = nn.Sequential(
            nn.Conv2d(3*self.num_flows, 6*self.num_flows, 7, 1, 3),
            nn.PReLU(6*self.num_flows),
            nn.Conv2d(6*self.num_flows, 3, 7, 1, 3),
        )

    def _get_updateblock(self, cdim, scale_factor=None):
        """Build a BasicUpdateBlock refining features/flows at one level."""
        return BasicUpdateBlock(cdim=cdim, hidden_dim=128, flow_dim=48,
                                corr_dim=256, corr_dim2=160, fc_dim=124,
                                scale_factor=scale_factor, corr_levels=self.corr_levels,
                                radius=self.radius)

    def _corr_scale_lookup(self, corr_fn, coord, flow0, flow1, embt, downsample=1):
        """Look up bidirectional correlation at flow-displaced coordinates.

        Returns:
            (corr, flow): concatenated correlation features and the (possibly
            downsampled) concatenated flows used for the lookup.
        """
        # convert t -> 0 to 0 -> 1 | convert t -> 1 to 1 -> 0
        # based on linear assumption
        t1_scale = 1. / embt
        t0_scale = 1. / (1. - embt)
        if downsample != 1:
            inv = 1 / downsample
            flow0 = inv * resize(flow0, scale_factor=inv)
            flow1 = inv * resize(flow1, scale_factor=inv)

        corr0, corr1 = corr_fn(coord + flow1 * t1_scale, coord + flow0 * t0_scale)
        corr = torch.cat([corr0, corr1], dim=1)
        flow = torch.cat([flow0, flow1], dim=1)
        return corr, flow

    def forward(self, img0, img1, embt, scale_factor=1.0, eval=False, **kwargs):
        """Predict the frame at time `embt` between img0 and img1.

        Args:
            img0, img1: input frames (NCHW).
            embt: interpolation time embedding, broadcastable to (N,1,1,1).
            scale_factor: optional internal downscale for memory savings.
            eval: if True, return only the predicted frame (shadows the
                builtin, but the name is part of the public interface).

        Returns:
            dict with 'imgt_pred', plus flow/feature predictions when not eval.
        """
        # Remove the shared mean so the network works on zero-centered input.
        mean_ = torch.cat([img0, img1], 2).mean(1, keepdim=True).mean(2, keepdim=True).mean(3, keepdim=True)
        img0 = img0 - mean_
        img1 = img1 - mean_
        img0_ = resize(img0, scale_factor) if scale_factor != 1.0 else img0
        img1_ = resize(img1, scale_factor) if scale_factor != 1.0 else img1
        b, _, h, w = img0_.shape
        coord = coords_grid(b, h // 8, w // 8, img0.device)

        fmap0, fmap1 = self.feat_encoder([img0_, img1_])  # [1, 128, H//8, W//8]
        corr_fn = BidirCorrBlock(fmap0, fmap1, radius=self.radius, num_levels=self.corr_levels)

        # f0_1: [1, c0, H//2, W//2] | f0_2: [1, c1, H//4, W//4]
        # f0_3: [1, c2, H//8, W//8] | f0_4: [1, c3, H//16, W//16]
        f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_)
        f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_)

        ######################################### the 4th decoder #########################################
        up_flow0_4, up_flow1_4, ft_3_ = self.decoder4(f0_4, f1_4, embt)
        corr_4, flow_4 = self._corr_scale_lookup(corr_fn, coord,
                                                 up_flow0_4, up_flow1_4,
                                                 embt, downsample=1)

        # residue update with lookup corr
        delta_ft_3_, delta_flow_4 = self.update4(ft_3_, flow_4, corr_4)
        delta_flow0_4, delta_flow1_4 = torch.chunk(delta_flow_4, 2, 1)
        up_flow0_4 = up_flow0_4 + delta_flow0_4
        up_flow1_4 = up_flow1_4 + delta_flow1_4
        ft_3_ = ft_3_ + delta_ft_3_

        ######################################### the 3rd decoder #########################################
        up_flow0_3, up_flow1_3, ft_2_ = self.decoder3(ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4)
        corr_3, flow_3 = self._corr_scale_lookup(corr_fn,
                                                 coord, up_flow0_3, up_flow1_3,
                                                 embt, downsample=2)

        # residue update with lookup corr
        delta_ft_2_, delta_flow_3 = self.update3(ft_2_, flow_3, corr_3)
        delta_flow0_3, delta_flow1_3 = torch.chunk(delta_flow_3, 2, 1)
        up_flow0_3 = up_flow0_3 + delta_flow0_3
        up_flow1_3 = up_flow1_3 + delta_flow1_3
        ft_2_ = ft_2_ + delta_ft_2_

        ######################################### the 2nd decoder #########################################
        up_flow0_2, up_flow1_2, ft_1_ = self.decoder2(ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3)
        corr_2, flow_2 = self._corr_scale_lookup(corr_fn,
                                                 coord, up_flow0_2, up_flow1_2,
                                                 embt, downsample=4)

        # residue update with lookup corr
        delta_ft_1_, delta_flow_2 = self.update2(ft_1_, flow_2, corr_2)
        delta_flow0_2, delta_flow1_2 = torch.chunk(delta_flow_2, 2, 1)
        up_flow0_2 = up_flow0_2 + delta_flow0_2
        up_flow1_2 = up_flow1_2 + delta_flow1_2
        ft_1_ = ft_1_ + delta_ft_1_

        ######################################### the 1st decoder #########################################
        up_flow0_1, up_flow1_1, mask, img_res = self.decoder1(ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2)

        if scale_factor != 1.0:
            # Flows were estimated at the downscaled resolution: resize back
            # and rescale their magnitudes by the same factor.
            up_flow0_1 = resize(up_flow0_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor)
            up_flow1_1 = resize(up_flow1_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor)
            mask = resize(mask, scale_factor=(1.0/scale_factor))
            img_res = resize(img_res, scale_factor=(1.0/scale_factor))

        # Merge multiple predictions
        imgt_pred = multi_flow_combine(self.comb_block, img0, img1, up_flow0_1, up_flow1_1,
                                       mask, img_res, mean_)
        imgt_pred = torch.clamp(imgt_pred, 0, 1)

        if eval:
            return { 'imgt_pred': imgt_pred, }
        else:
            up_flow0_1 = up_flow0_1.reshape(b, self.num_flows, 2, h, w)
            up_flow1_1 = up_flow1_1.reshape(b, self.num_flows, 2, h, w)
            return {
                'imgt_pred': imgt_pred,
                'flow0_pred': [up_flow0_1, up_flow0_2, up_flow0_3, up_flow0_4],
                'flow1_pred': [up_flow1_1, up_flow1_2, up_flow1_3, up_flow1_4],
                'ft_pred': [ft_1_, ft_2_, ft_3_],
            }
||||||
|
|
||||||
154
networks/AMT-S.py
Executable file
154
networks/AMT-S.py
Executable file
@@ -0,0 +1,154 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn as nn
|
||||||
|
from networks.blocks.raft import (
|
||||||
|
coords_grid,
|
||||||
|
SmallUpdateBlock, BidirCorrBlock
|
||||||
|
)
|
||||||
|
from networks.blocks.feat_enc import (
|
||||||
|
SmallEncoder
|
||||||
|
)
|
||||||
|
from networks.blocks.ifrnet import (
|
||||||
|
resize,
|
||||||
|
Encoder,
|
||||||
|
InitDecoder,
|
||||||
|
IntermediateDecoder
|
||||||
|
)
|
||||||
|
from networks.blocks.multi_flow import (
|
||||||
|
multi_flow_combine,
|
||||||
|
MultiFlowDecoder
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Model(nn.Module):
    """AMT-S frame-interpolation network.

    Smallest AMT variant: SmallEncoder features, SmallUpdateBlock refiners,
    and fewer flow candidates, but the same coarse-to-fine decoder pipeline.
    """

    def __init__(self,
                 corr_radius=3,
                 corr_lvls=4,
                 num_flows=3,
                 channels=[20, 32, 44, 56],
                 skip_channels=20):
        """Build encoders, decoders, update blocks, and the combine head.

        Args:
            corr_radius: lookup radius in the correlation volume.
            corr_lvls: number of correlation pyramid levels.
            num_flows: flow candidates predicted by the final decoder.
            channels: feature widths at the four pyramid levels (shared
                default list — never mutated, so the mutable default is benign).
            skip_channels: width of the skip connections between decoders.
        """
        super(Model, self).__init__()
        self.radius = corr_radius
        self.corr_levels = corr_lvls
        self.num_flows = num_flows
        self.channels = channels
        self.skip_channels = skip_channels

        self.feat_encoder = SmallEncoder(output_dim=84, norm_fn='instance', dropout=0.)
        self.encoder = Encoder(channels)

        self.decoder4 = InitDecoder(channels[3], channels[2], skip_channels)
        self.decoder3 = IntermediateDecoder(channels[2], channels[1], skip_channels)
        self.decoder2 = IntermediateDecoder(channels[1], channels[0], skip_channels)
        self.decoder1 = MultiFlowDecoder(channels[0], skip_channels, num_flows)

        # NOTE(review): widths 44/32/20 duplicate channels[2]/[1]/[0] defaults.
        self.update4 = self._get_updateblock(44)
        self.update3 = self._get_updateblock(32, 2)
        self.update2 = self._get_updateblock(20, 4)

        self.comb_block = nn.Sequential(
            nn.Conv2d(3*num_flows, 6*num_flows, 3, 1, 1),
            nn.PReLU(6*num_flows),
            nn.Conv2d(6*num_flows, 3, 3, 1, 1),
        )

    def _get_updateblock(self, cdim, scale_factor=None):
        """Build a SmallUpdateBlock refining features/flows at one level."""
        return SmallUpdateBlock(cdim=cdim, hidden_dim=76, flow_dim=20, corr_dim=64,
                                fc_dim=68, scale_factor=scale_factor,
                                corr_levels=self.corr_levels, radius=self.radius)

    def _corr_scale_lookup(self, corr_fn, coord, flow0, flow1, embt, downsample=1):
        """Look up bidirectional correlation at flow-displaced coordinates.

        Returns:
            (corr, flow): concatenated correlation features and the (possibly
            downsampled) concatenated flows used for the lookup.
        """
        # convert t -> 0 to 0 -> 1 | convert t -> 1 to 1 -> 0
        # based on linear assumption
        t1_scale = 1. / embt
        t0_scale = 1. / (1. - embt)
        if downsample != 1:
            inv = 1 / downsample
            flow0 = inv * resize(flow0, scale_factor=inv)
            flow1 = inv * resize(flow1, scale_factor=inv)

        corr0, corr1 = corr_fn(coord + flow1 * t1_scale, coord + flow0 * t0_scale)
        corr = torch.cat([corr0, corr1], dim=1)
        flow = torch.cat([flow0, flow1], dim=1)
        return corr, flow

    def forward(self, img0, img1, embt, scale_factor=1.0, eval=False, **kwargs):
        """Predict the frame at time `embt` between img0 and img1.

        Args:
            img0, img1: input frames (NCHW).
            embt: interpolation time embedding, broadcastable to (N,1,1,1).
            scale_factor: optional internal downscale for memory savings.
            eval: if True, return only the predicted frame (shadows the
                builtin, but the name is part of the public interface).

        Returns:
            dict with 'imgt_pred', plus flow/feature predictions when not eval.
        """
        # Remove the shared mean so the network works on zero-centered input.
        mean_ = torch.cat([img0, img1], 2).mean(1, keepdim=True).mean(2, keepdim=True).mean(3, keepdim=True)
        img0 = img0 - mean_
        img1 = img1 - mean_
        img0_ = resize(img0, scale_factor) if scale_factor != 1.0 else img0
        img1_ = resize(img1, scale_factor) if scale_factor != 1.0 else img1
        b, _, h, w = img0_.shape
        coord = coords_grid(b, h // 8, w // 8, img0.device)

        fmap0, fmap1 = self.feat_encoder([img0_, img1_])  # [1, 128, H//8, W//8]
        corr_fn = BidirCorrBlock(fmap0, fmap1, radius=self.radius, num_levels=self.corr_levels)

        # f0_1: [1, c0, H//2, W//2] | f0_2: [1, c1, H//4, W//4]
        # f0_3: [1, c2, H//8, W//8] | f0_4: [1, c3, H//16, W//16]
        f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_)
        f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_)

        ######################################### the 4th decoder #########################################
        up_flow0_4, up_flow1_4, ft_3_ = self.decoder4(f0_4, f1_4, embt)
        corr_4, flow_4 = self._corr_scale_lookup(corr_fn, coord,
                                                 up_flow0_4, up_flow1_4,
                                                 embt, downsample=1)

        # residue update with lookup corr
        delta_ft_3_, delta_flow_4 = self.update4(ft_3_, flow_4, corr_4)
        delta_flow0_4, delta_flow1_4 = torch.chunk(delta_flow_4, 2, 1)
        up_flow0_4 = up_flow0_4 + delta_flow0_4
        up_flow1_4 = up_flow1_4 + delta_flow1_4
        ft_3_ = ft_3_ + delta_ft_3_

        ######################################### the 3rd decoder #########################################
        up_flow0_3, up_flow1_3, ft_2_ = self.decoder3(ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4)
        corr_3, flow_3 = self._corr_scale_lookup(corr_fn,
                                                 coord, up_flow0_3, up_flow1_3,
                                                 embt, downsample=2)

        # residue update with lookup corr
        delta_ft_2_, delta_flow_3 = self.update3(ft_2_, flow_3, corr_3)
        delta_flow0_3, delta_flow1_3 = torch.chunk(delta_flow_3, 2, 1)
        up_flow0_3 = up_flow0_3 + delta_flow0_3
        up_flow1_3 = up_flow1_3 + delta_flow1_3
        ft_2_ = ft_2_ + delta_ft_2_

        ######################################### the 2nd decoder #########################################
        up_flow0_2, up_flow1_2, ft_1_ = self.decoder2(ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3)
        corr_2, flow_2 = self._corr_scale_lookup(corr_fn,
                                                 coord, up_flow0_2, up_flow1_2,
                                                 embt, downsample=4)

        # residue update with lookup corr
        delta_ft_1_, delta_flow_2 = self.update2(ft_1_, flow_2, corr_2)
        delta_flow0_2, delta_flow1_2 = torch.chunk(delta_flow_2, 2, 1)
        up_flow0_2 = up_flow0_2 + delta_flow0_2
        up_flow1_2 = up_flow1_2 + delta_flow1_2
        ft_1_ = ft_1_ + delta_ft_1_

        ######################################### the 1st decoder #########################################
        up_flow0_1, up_flow1_1, mask, img_res = self.decoder1(ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2)

        if scale_factor != 1.0:
            # Flows were estimated at the downscaled resolution: resize back
            # and rescale their magnitudes by the same factor.
            up_flow0_1 = resize(up_flow0_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor)
            up_flow1_1 = resize(up_flow1_1, scale_factor=(1.0/scale_factor)) * (1.0/scale_factor)
            mask = resize(mask, scale_factor=(1.0/scale_factor))
            img_res = resize(img_res, scale_factor=(1.0/scale_factor))

        # Merge multiple predictions
        imgt_pred = multi_flow_combine(self.comb_block, img0, img1, up_flow0_1, up_flow1_1,
                                       mask, img_res, mean_)
        imgt_pred = torch.clamp(imgt_pred, 0, 1)

        if eval:
            return { 'imgt_pred': imgt_pred, }
        else:
            up_flow0_1 = up_flow0_1.reshape(b, self.num_flows, 2, h, w)
            up_flow1_1 = up_flow1_1.reshape(b, self.num_flows, 2, h, w)
            return {
                'imgt_pred': imgt_pred,
                'flow0_pred': [up_flow0_1, up_flow0_2, up_flow0_3, up_flow0_4],
                'flow1_pred': [up_flow1_1, up_flow1_2, up_flow1_3, up_flow1_4],
                'ft_pred': [ft_1_, ft_2_, ft_3_],
            }
||||||
169
networks/IFRNet.py
Executable file
169
networks/IFRNet.py
Executable file
@@ -0,0 +1,169 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn as nn
|
||||||
|
from src.utils.flow_utils import warp
|
||||||
|
from networks.blocks.ifrnet import (
|
||||||
|
convrelu, resize,
|
||||||
|
ResBlock,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Encoder(nn.Module):
    """Four-stage convolutional feature pyramid for IFRNet.

    Each stage halves the spatial resolution (stride-2 conv) then refines at
    stride 1, producing features at 1/2, 1/4, 1/8, and 1/16 scale.
    """

    def __init__(self):
        super(Encoder, self).__init__()
        # (in_channels, out_channels) per pyramid stage, shallowest first.
        stage_specs = [(3, 32), (32, 48), (48, 72), (72, 96)]
        for level, (c_in, c_out) in enumerate(stage_specs, start=1):
            stage = nn.Sequential(
                convrelu(c_in, c_out, 3, 2, 1),
                convrelu(c_out, c_out, 3, 1, 1),
            )
            # Keep the original attribute names so state_dict keys match.
            setattr(self, f"pyramid{level}", stage)

    def forward(self, img):
        """Return the four pyramid features (f1 shallow ... f4 deep)."""
        feats = []
        x = img
        for level in range(1, 5):
            x = getattr(self, f"pyramid{level}")(x)
            feats.append(x)
        return tuple(feats)
|
||||||
|
|
||||||
|
class Decoder4(nn.Module):
    """Coarsest IFRNet decoder: predicts the initial outputs from level-4 features.

    The input is both level-4 feature maps concatenated with one broadcast
    time-embedding plane; the transposed conv upsamples the result 2x.
    """

    def __init__(self):
        super(Decoder4, self).__init__()
        self.convblock = nn.Sequential(
            convrelu(192 + 1, 192),                           # fuse f0, f1 and embt
            ResBlock(192, 32),
            nn.ConvTranspose2d(192, 76, 4, 2, 1, bias=True),  # 2x upsample
        )

    def forward(self, f0, f1, embt):
        """Fuse the two level-4 features with the spatially broadcast embedding."""
        height, width = f0.shape[2:]
        embt_map = embt.repeat(1, 1, height, width)  # broadcast scalar embedding
        fused = torch.cat([f0, f1, embt_map], 1)
        return self.convblock(fused)
|
||||||
|
|
||||||
|
|
||||||
|
class Decoder3(nn.Module):
    """Level-3 IFRNet decoder: refines features warped by the current flows."""

    def __init__(self):
        super(Decoder3, self).__init__()
        self.convblock = nn.Sequential(
            convrelu(220, 216),
            ResBlock(216, 32),
            nn.ConvTranspose2d(216, 52, 4, 2, 1, bias=True),  # 2x upsample
        )

    def forward(self, ft_, f0, f1, up_flow0, up_flow1):
        """Warp both encoder features by their flows and decode jointly."""
        warped0 = warp(f0, up_flow0)
        warped1 = warp(f1, up_flow1)
        fused = torch.cat([ft_, warped0, warped1, up_flow0, up_flow1], 1)
        return self.convblock(fused)
|
||||||
|
|
||||||
|
|
||||||
|
class Decoder2(nn.Module):
    """Level-2 IFRNet decoder: refines features warped by the current flows."""

    def __init__(self):
        super(Decoder2, self).__init__()
        self.convblock = nn.Sequential(
            convrelu(148, 144),
            ResBlock(144, 32),
            nn.ConvTranspose2d(144, 36, 4, 2, 1, bias=True),  # 2x upsample
        )

    def forward(self, ft_, f0, f1, up_flow0, up_flow1):
        """Warp both encoder features by their flows and decode jointly."""
        warped0 = warp(f0, up_flow0)
        warped1 = warp(f1, up_flow1)
        fused = torch.cat([ft_, warped0, warped1, up_flow0, up_flow1], 1)
        return self.convblock(fused)
|
||||||
|
|
||||||
|
|
||||||
|
class Decoder1(nn.Module):
    """Finest IFRNet decoder: emits final flows, fusion mask and residual image."""

    def __init__(self):
        super(Decoder1, self).__init__()
        self.convblock = nn.Sequential(
            convrelu(100, 96),
            ResBlock(96, 32),
            nn.ConvTranspose2d(96, 8, 4, 2, 1, bias=True),  # 2x upsample
        )

    def forward(self, ft_, f0, f1, up_flow0, up_flow1):
        """Warp both encoder features by their flows and decode jointly."""
        warped0 = warp(f0, up_flow0)
        warped1 = warp(f1, up_flow1)
        fused = torch.cat([ft_, warped0, warped1, up_flow0, up_flow1], 1)
        return self.convblock(fused)
|
||||||
|
|
||||||
|
|
||||||
|
class Model(nn.Module):
    """IFRNet frame-interpolation model.

    Encodes both frames into a four-level feature pyramid, then decodes
    bidirectional flows coarse-to-fine plus a fusion mask and an image
    residual to synthesize the intermediate frame.
    """

    def __init__(self):
        super(Model, self).__init__()
        self.encoder = Encoder()
        self.decoder4 = Decoder4()
        self.decoder3 = Decoder3()
        self.decoder2 = Decoder2()
        self.decoder1 = Decoder1()

    # NOTE: the parameter name ``eval`` shadows the builtin, but it is part
    # of the public call signature and must stay for caller compatibility.
    def forward(self, img0, img1, embt, scale_factor=1.0, eval=False, **kwargs):
        """Predict the intermediate frame between ``img0`` and ``img1``.

        Args:
            img0, img1: input frames, shape [b, 3, h, w], values in [0, 1].
            embt: time embedding, broadcastable to [b, 1, h, w].
            scale_factor: optional working-resolution scale for the pyramid.
            eval: when True, return only the predicted frame.

        Returns:
            dict with 'imgt_pred' (and, when not ``eval``, per-level flow,
            feature and warped-image predictions).
        """
        # Remove the joint mean so the network sees zero-centred inputs.
        mean_ = torch.cat([img0, img1], 2).mean(1, keepdim=True) \
                                          .mean(2, keepdim=True) \
                                          .mean(3, keepdim=True)
        img0 = img0 - mean_
        img1 = img1 - mean_

        # Optionally run the pyramid at a different working resolution.
        if scale_factor != 1.0:
            img0_ = resize(img0, scale_factor)
            img1_ = resize(img1, scale_factor)
        else:
            img0_, img1_ = img0, img1

        f0_1, f0_2, f0_3, f0_4 = self.encoder(img0_)
        f1_1, f1_2, f1_3, f1_4 = self.encoder(img1_)

        # Level 4: initial flows from the coarsest features.
        out4 = self.decoder4(f0_4, f1_4, embt)
        up_flow0_4 = out4[:, 0:2]
        up_flow1_4 = out4[:, 2:4]
        ft_3_ = out4[:, 4:]

        # Levels 3..1: each adds its delta onto the 2x-upsampled coarser
        # flow (flow magnitudes double along with the spatial upsample).
        out3 = self.decoder3(ft_3_, f0_3, f1_3, up_flow0_4, up_flow1_4)
        up_flow0_3 = out3[:, 0:2] + 2.0 * resize(up_flow0_4, scale_factor=2.0)
        up_flow1_3 = out3[:, 2:4] + 2.0 * resize(up_flow1_4, scale_factor=2.0)
        ft_2_ = out3[:, 4:]

        out2 = self.decoder2(ft_2_, f0_2, f1_2, up_flow0_3, up_flow1_3)
        up_flow0_2 = out2[:, 0:2] + 2.0 * resize(up_flow0_3, scale_factor=2.0)
        up_flow1_2 = out2[:, 2:4] + 2.0 * resize(up_flow1_3, scale_factor=2.0)
        ft_1_ = out2[:, 4:]

        out1 = self.decoder1(ft_1_, f0_1, f1_1, up_flow0_2, up_flow1_2)
        up_flow0_1 = out1[:, 0:2] + 2.0 * resize(up_flow0_2, scale_factor=2.0)
        up_flow1_1 = out1[:, 2:4] + 2.0 * resize(up_flow1_2, scale_factor=2.0)
        up_mask_1 = torch.sigmoid(out1[:, 4:5])  # fusion weight in [0, 1]
        up_res_1 = out1[:, 5:]

        # Map predictions back to the original resolution; flow values must
        # be rescaled together with the spatial rescale.
        if scale_factor != 1.0:
            inv = 1.0 / scale_factor
            up_flow0_1 = resize(up_flow0_1, scale_factor=inv) * inv
            up_flow1_1 = resize(up_flow1_1, scale_factor=inv) * inv
            up_mask_1 = resize(up_mask_1, scale_factor=inv)
            up_res_1 = resize(up_res_1, scale_factor=inv)

        # Blend the two warped frames, restore the mean, add the residual.
        img0_warp = warp(img0, up_flow0_1)
        img1_warp = warp(img1, up_flow1_1)
        imgt_merge = up_mask_1 * img0_warp + (1 - up_mask_1) * img1_warp + mean_
        imgt_pred = torch.clamp(imgt_merge + up_res_1, 0, 1)

        if eval:
            return { 'imgt_pred': imgt_pred, }
        return {
            'imgt_pred': imgt_pred,
            'flow0_pred': [up_flow0_1, up_flow0_2, up_flow0_3, up_flow0_4],
            'flow1_pred': [up_flow1_1, up_flow1_2, up_flow1_3, up_flow1_4],
            'ft_pred': [ft_1_, ft_2_, ft_3_],
            'img0_warp': img0_warp,
            'img1_warp': img1_warp
        }
|
||||||
0
networks/__init__.py
Executable file
0
networks/__init__.py
Executable file
0
networks/blocks/__init__.py
Executable file
0
networks/blocks/__init__.py
Executable file
343
networks/blocks/feat_enc.py
Executable file
343
networks/blocks/feat_enc.py
Executable file
@@ -0,0 +1,343 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn as nn
|
||||||
|
|
||||||
|
|
||||||
|
class BottleneckBlock(nn.Module):
    """1x1 -> 3x3 -> 1x1 bottleneck residual block with configurable norm.

    Args:
        in_planes: number of input channels.
        planes: number of output channels (the bottleneck uses planes // 4).
        norm_fn: one of 'group', 'batch', 'instance', 'none'.
        stride: stride of the middle 3x3 conv; when != 1 a strided 1x1
            downsample path matches the residual's shape.

    Raises:
        ValueError: if ``norm_fn`` is not a recognised option.
    """

    def __init__(self, in_planes, planes, norm_fn='group', stride=1):
        super(BottleneckBlock, self).__init__()

        self.conv1 = nn.Conv2d(in_planes, planes//4, kernel_size=1, padding=0)
        self.conv2 = nn.Conv2d(planes//4, planes//4, kernel_size=3, padding=1, stride=stride)
        self.conv3 = nn.Conv2d(planes//4, planes, kernel_size=1, padding=0)
        self.relu = nn.ReLU(inplace=True)

        num_groups = planes // 8

        def make_norm(channels):
            # Build one normalisation layer of the requested kind.
            if norm_fn == 'group':
                return nn.GroupNorm(num_groups=num_groups, num_channels=channels)
            if norm_fn == 'batch':
                return nn.BatchNorm2d(channels)
            if norm_fn == 'instance':
                return nn.InstanceNorm2d(channels)
            if norm_fn == 'none':
                return nn.Sequential()
            # BUGFIX: previously an unknown norm_fn silently left the norm
            # layers undefined and only failed later with AttributeError.
            raise ValueError(f"unknown norm_fn: {norm_fn!r}")

        self.norm1 = make_norm(planes//4)
        self.norm2 = make_norm(planes//4)
        self.norm3 = make_norm(planes)
        if not stride == 1:
            self.norm4 = make_norm(planes)

        if stride == 1:
            self.downsample = None
        else:
            self.downsample = nn.Sequential(
                nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm4)

    def forward(self, x):
        """Apply the bottleneck path and add the (possibly downsampled) input."""
        y = x
        y = self.relu(self.norm1(self.conv1(y)))
        y = self.relu(self.norm2(self.conv2(y)))
        y = self.relu(self.norm3(self.conv3(y)))

        if self.downsample is not None:
            x = self.downsample(x)

        return self.relu(x + y)
|
||||||
|
|
||||||
|
|
||||||
|
class ResidualBlock(nn.Module):
    """Two-conv residual block with configurable normalisation.

    Args:
        in_planes: number of input channels.
        planes: number of output channels.
        norm_fn: one of 'group', 'batch', 'instance', 'none'.
        stride: stride of the first conv; when != 1 a strided 1x1
            downsample path matches the residual's shape.

    Raises:
        ValueError: if ``norm_fn`` is not a recognised option.
    """

    def __init__(self, in_planes, planes, norm_fn='group', stride=1):
        super(ResidualBlock, self).__init__()

        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, padding=1, stride=stride)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1)
        self.relu = nn.ReLU(inplace=True)

        num_groups = planes // 8

        def make_norm(channels):
            # Build one normalisation layer of the requested kind.
            if norm_fn == 'group':
                return nn.GroupNorm(num_groups=num_groups, num_channels=channels)
            if norm_fn == 'batch':
                return nn.BatchNorm2d(channels)
            if norm_fn == 'instance':
                return nn.InstanceNorm2d(channels)
            if norm_fn == 'none':
                return nn.Sequential()
            # BUGFIX: previously an unknown norm_fn silently left the norm
            # layers undefined and only failed later with AttributeError.
            raise ValueError(f"unknown norm_fn: {norm_fn!r}")

        self.norm1 = make_norm(planes)
        self.norm2 = make_norm(planes)
        if not stride == 1:
            self.norm3 = make_norm(planes)

        if stride == 1:
            self.downsample = None
        else:
            self.downsample = nn.Sequential(
                nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm3)

    def forward(self, x):
        """Apply both convs and add the (possibly downsampled) input."""
        y = x
        y = self.relu(self.norm1(self.conv1(y)))
        y = self.relu(self.norm2(self.conv2(y)))

        if self.downsample is not None:
            x = self.downsample(x)

        return self.relu(x + y)
|
||||||
|
|
||||||
|
|
||||||
|
class SmallEncoder(nn.Module):
|
||||||
|
def __init__(self, output_dim=128, norm_fn='batch', dropout=0.0):
|
||||||
|
super(SmallEncoder, self).__init__()
|
||||||
|
self.norm_fn = norm_fn
|
||||||
|
|
||||||
|
if self.norm_fn == 'group':
|
||||||
|
self.norm1 = nn.GroupNorm(num_groups=8, num_channels=32)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'batch':
|
||||||
|
self.norm1 = nn.BatchNorm2d(32)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'instance':
|
||||||
|
self.norm1 = nn.InstanceNorm2d(32)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'none':
|
||||||
|
self.norm1 = nn.Sequential()
|
||||||
|
|
||||||
|
self.conv1 = nn.Conv2d(3, 32, kernel_size=7, stride=2, padding=3)
|
||||||
|
self.relu1 = nn.ReLU(inplace=True)
|
||||||
|
|
||||||
|
self.in_planes = 32
|
||||||
|
self.layer1 = self._make_layer(32, stride=1)
|
||||||
|
self.layer2 = self._make_layer(64, stride=2)
|
||||||
|
self.layer3 = self._make_layer(96, stride=2)
|
||||||
|
|
||||||
|
self.dropout = None
|
||||||
|
if dropout > 0:
|
||||||
|
self.dropout = nn.Dropout2d(p=dropout)
|
||||||
|
|
||||||
|
self.conv2 = nn.Conv2d(96, output_dim, kernel_size=1)
|
||||||
|
|
||||||
|
for m in self.modules():
|
||||||
|
if isinstance(m, nn.Conv2d):
|
||||||
|
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
|
||||||
|
elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)):
|
||||||
|
if m.weight is not None:
|
||||||
|
nn.init.constant_(m.weight, 1)
|
||||||
|
if m.bias is not None:
|
||||||
|
nn.init.constant_(m.bias, 0)
|
||||||
|
|
||||||
|
def _make_layer(self, dim, stride=1):
|
||||||
|
layer1 = BottleneckBlock(self.in_planes, dim, self.norm_fn, stride=stride)
|
||||||
|
layer2 = BottleneckBlock(dim, dim, self.norm_fn, stride=1)
|
||||||
|
layers = (layer1, layer2)
|
||||||
|
|
||||||
|
self.in_planes = dim
|
||||||
|
return nn.Sequential(*layers)
|
||||||
|
|
||||||
|
|
||||||
|
def forward(self, x):
|
||||||
|
|
||||||
|
# if input is list, combine batch dimension
|
||||||
|
is_list = isinstance(x, tuple) or isinstance(x, list)
|
||||||
|
if is_list:
|
||||||
|
batch_dim = x[0].shape[0]
|
||||||
|
x = torch.cat(x, dim=0)
|
||||||
|
|
||||||
|
x = self.conv1(x)
|
||||||
|
x = self.norm1(x)
|
||||||
|
x = self.relu1(x)
|
||||||
|
|
||||||
|
x = self.layer1(x)
|
||||||
|
x = self.layer2(x)
|
||||||
|
x = self.layer3(x)
|
||||||
|
x = self.conv2(x)
|
||||||
|
|
||||||
|
if self.training and self.dropout is not None:
|
||||||
|
x = self.dropout(x)
|
||||||
|
|
||||||
|
if is_list:
|
||||||
|
x = torch.split(x, [batch_dim, batch_dim], dim=0)
|
||||||
|
|
||||||
|
return x
|
||||||
|
|
||||||
|
class BasicEncoder(nn.Module):
|
||||||
|
def __init__(self, output_dim=128, norm_fn='batch', dropout=0.0):
|
||||||
|
super(BasicEncoder, self).__init__()
|
||||||
|
self.norm_fn = norm_fn
|
||||||
|
|
||||||
|
if self.norm_fn == 'group':
|
||||||
|
self.norm1 = nn.GroupNorm(num_groups=8, num_channels=64)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'batch':
|
||||||
|
self.norm1 = nn.BatchNorm2d(64)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'instance':
|
||||||
|
self.norm1 = nn.InstanceNorm2d(64)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'none':
|
||||||
|
self.norm1 = nn.Sequential()
|
||||||
|
|
||||||
|
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3)
|
||||||
|
self.relu1 = nn.ReLU(inplace=True)
|
||||||
|
|
||||||
|
self.in_planes = 64
|
||||||
|
self.layer1 = self._make_layer(64, stride=1)
|
||||||
|
self.layer2 = self._make_layer(72, stride=2)
|
||||||
|
self.layer3 = self._make_layer(128, stride=2)
|
||||||
|
|
||||||
|
# output convolution
|
||||||
|
self.conv2 = nn.Conv2d(128, output_dim, kernel_size=1)
|
||||||
|
|
||||||
|
self.dropout = None
|
||||||
|
if dropout > 0:
|
||||||
|
self.dropout = nn.Dropout2d(p=dropout)
|
||||||
|
|
||||||
|
for m in self.modules():
|
||||||
|
if isinstance(m, nn.Conv2d):
|
||||||
|
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
|
||||||
|
elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)):
|
||||||
|
if m.weight is not None:
|
||||||
|
nn.init.constant_(m.weight, 1)
|
||||||
|
if m.bias is not None:
|
||||||
|
nn.init.constant_(m.bias, 0)
|
||||||
|
|
||||||
|
def _make_layer(self, dim, stride=1):
|
||||||
|
layer1 = ResidualBlock(self.in_planes, dim, self.norm_fn, stride=stride)
|
||||||
|
layer2 = ResidualBlock(dim, dim, self.norm_fn, stride=1)
|
||||||
|
layers = (layer1, layer2)
|
||||||
|
|
||||||
|
self.in_planes = dim
|
||||||
|
return nn.Sequential(*layers)
|
||||||
|
|
||||||
|
|
||||||
|
def forward(self, x):
|
||||||
|
|
||||||
|
# if input is list, combine batch dimension
|
||||||
|
is_list = isinstance(x, tuple) or isinstance(x, list)
|
||||||
|
if is_list:
|
||||||
|
batch_dim = x[0].shape[0]
|
||||||
|
x = torch.cat(x, dim=0)
|
||||||
|
|
||||||
|
x = self.conv1(x)
|
||||||
|
x = self.norm1(x)
|
||||||
|
x = self.relu1(x)
|
||||||
|
|
||||||
|
x = self.layer1(x)
|
||||||
|
x = self.layer2(x)
|
||||||
|
x = self.layer3(x)
|
||||||
|
|
||||||
|
x = self.conv2(x)
|
||||||
|
|
||||||
|
if self.training and self.dropout is not None:
|
||||||
|
x = self.dropout(x)
|
||||||
|
|
||||||
|
if is_list:
|
||||||
|
x = torch.split(x, [batch_dim, batch_dim], dim=0)
|
||||||
|
|
||||||
|
return x
|
||||||
|
|
||||||
|
class LargeEncoder(nn.Module):
|
||||||
|
def __init__(self, output_dim=128, norm_fn='batch', dropout=0.0):
|
||||||
|
super(LargeEncoder, self).__init__()
|
||||||
|
self.norm_fn = norm_fn
|
||||||
|
|
||||||
|
if self.norm_fn == 'group':
|
||||||
|
self.norm1 = nn.GroupNorm(num_groups=8, num_channels=64)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'batch':
|
||||||
|
self.norm1 = nn.BatchNorm2d(64)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'instance':
|
||||||
|
self.norm1 = nn.InstanceNorm2d(64)
|
||||||
|
|
||||||
|
elif self.norm_fn == 'none':
|
||||||
|
self.norm1 = nn.Sequential()
|
||||||
|
|
||||||
|
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3)
|
||||||
|
self.relu1 = nn.ReLU(inplace=True)
|
||||||
|
|
||||||
|
self.in_planes = 64
|
||||||
|
self.layer1 = self._make_layer(64, stride=1)
|
||||||
|
self.layer2 = self._make_layer(112, stride=2)
|
||||||
|
self.layer3 = self._make_layer(160, stride=2)
|
||||||
|
self.layer3_2 = self._make_layer(160, stride=1)
|
||||||
|
|
||||||
|
# output convolution
|
||||||
|
self.conv2 = nn.Conv2d(self.in_planes, output_dim, kernel_size=1)
|
||||||
|
|
||||||
|
self.dropout = None
|
||||||
|
if dropout > 0:
|
||||||
|
self.dropout = nn.Dropout2d(p=dropout)
|
||||||
|
|
||||||
|
for m in self.modules():
|
||||||
|
if isinstance(m, nn.Conv2d):
|
||||||
|
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
|
||||||
|
elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)):
|
||||||
|
if m.weight is not None:
|
||||||
|
nn.init.constant_(m.weight, 1)
|
||||||
|
if m.bias is not None:
|
||||||
|
nn.init.constant_(m.bias, 0)
|
||||||
|
|
||||||
|
def _make_layer(self, dim, stride=1):
|
||||||
|
layer1 = ResidualBlock(self.in_planes, dim, self.norm_fn, stride=stride)
|
||||||
|
layer2 = ResidualBlock(dim, dim, self.norm_fn, stride=1)
|
||||||
|
layers = (layer1, layer2)
|
||||||
|
|
||||||
|
self.in_planes = dim
|
||||||
|
return nn.Sequential(*layers)
|
||||||
|
|
||||||
|
|
||||||
|
def forward(self, x):
|
||||||
|
|
||||||
|
# if input is list, combine batch dimension
|
||||||
|
is_list = isinstance(x, tuple) or isinstance(x, list)
|
||||||
|
if is_list:
|
||||||
|
batch_dim = x[0].shape[0]
|
||||||
|
x = torch.cat(x, dim=0)
|
||||||
|
|
||||||
|
x = self.conv1(x)
|
||||||
|
x = self.norm1(x)
|
||||||
|
x = self.relu1(x)
|
||||||
|
|
||||||
|
x = self.layer1(x)
|
||||||
|
x = self.layer2(x)
|
||||||
|
x = self.layer3(x)
|
||||||
|
x = self.layer3_2(x)
|
||||||
|
|
||||||
|
x = self.conv2(x)
|
||||||
|
|
||||||
|
if self.training and self.dropout is not None:
|
||||||
|
x = self.dropout(x)
|
||||||
|
|
||||||
|
if is_list:
|
||||||
|
x = torch.split(x, [batch_dim, batch_dim], dim=0)
|
||||||
|
|
||||||
|
return x
|
||||||
111
networks/blocks/ifrnet.py
Executable file
111
networks/blocks/ifrnet.py
Executable file
@@ -0,0 +1,111 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn as nn
|
||||||
|
import torch.nn.functional as F
|
||||||
|
from src.utils.flow_utils import warp
|
||||||
|
|
||||||
|
|
||||||
|
def resize(x, scale_factor):
    """Bilinearly rescale a NCHW tensor by ``scale_factor`` (corners not aligned)."""
    return F.interpolate(
        x, scale_factor=scale_factor, mode="bilinear", align_corners=False
    )
|
||||||
|
|
||||||
|
def convrelu(in_channels, out_channels, kernel_size=3, stride=1, padding=1,
             dilation=1, groups=1, bias=True):
    """Conv2d followed by a channel-wise PReLU, packaged as one Sequential."""
    conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride,
                     padding, dilation, groups, bias=bias)
    return nn.Sequential(conv, nn.PReLU(out_channels))
|
||||||
|
|
||||||
|
class ResBlock(nn.Module):
    """Residual block that routes a narrow trailing "side" channel group
    through its own convolutions while the main channels pass straight
    between the fusing convs.
    """

    def __init__(self, in_channels, side_channels, bias=True):
        super(ResBlock, self).__init__()
        self.side_channels = side_channels

        def conv_prelu(channels):
            # 3x3 conv preserving resolution, followed by channel-wise PReLU.
            return nn.Sequential(
                nn.Conv2d(channels, channels, kernel_size=3, stride=1,
                          padding=1, bias=bias),
                nn.PReLU(channels),
            )

        self.conv1 = conv_prelu(in_channels)
        self.conv2 = conv_prelu(side_channels)
        self.conv3 = conv_prelu(in_channels)
        self.conv4 = conv_prelu(side_channels)
        self.conv5 = nn.Conv2d(in_channels, in_channels, kernel_size=3,
                               stride=1, padding=1, bias=bias)
        self.prelu = nn.PReLU(in_channels)

    def _refine_side(self, feat, side_conv):
        # Run only the trailing ``side_channels`` slice through ``side_conv``,
        # then re-attach it to the untouched leading channels.
        main = feat[:, :-self.side_channels, ...]
        side = side_conv(feat[:, -self.side_channels:, ...])
        return torch.cat([main, side], 1)

    def forward(self, x):
        """Residual forward pass with two side-channel refinement rounds."""
        out = self.conv1(x)
        out = self.conv3(self._refine_side(out, self.conv2))
        out = self.conv5(self._refine_side(out, self.conv4))
        return self.prelu(x + out)
|
||||||
|
|
||||||
|
class Encoder(nn.Module):
    """Configurable feature pyramid: one 2x-downsampling stage per entry
    in ``channels``.

    Args:
        channels: output channel count for each pyramid level.
        large: when True, the first stage uses a 7x7 kernel instead of 3x3.
    """

    def __init__(self, channels, large=False):
        super(Encoder, self).__init__()
        self.channels = channels
        in_ch = 3
        for level, out_ch in enumerate(channels, 1):
            # Only the first stage of the 'large' variant widens its kernel.
            if large and level == 1:
                kernel, pad = 7, 3
            else:
                kernel, pad = 3, 1
            self.register_module(
                f'pyramid{level}',
                nn.Sequential(
                    convrelu(in_ch, out_ch, kernel, 2, pad),  # downsample by 2
                    convrelu(out_ch, out_ch, 3, 1, 1),        # refine
                ),
            )
            in_ch = out_ch

    def forward(self, in_x):
        """Return the list of per-level feature maps, finest first."""
        feats = []
        x = in_x
        for level in range(1, len(self.channels) + 1):
            x = getattr(self, f'pyramid{level}')(x)
            feats.append(x)
        return feats
|
||||||
|
|
||||||
|
class InitDecoder(nn.Module):
    """Coarsest decoder: predicts the initial bidirectional flows and features."""

    def __init__(self, in_ch, out_ch, skip_ch) -> None:
        super().__init__()
        # +1 input channel for the broadcast time embedding; +4 output
        # channels for the two 2-channel flow fields.
        self.convblock = nn.Sequential(
            convrelu(in_ch * 2 + 1, in_ch * 2),
            ResBlock(in_ch * 2, skip_ch),
            nn.ConvTranspose2d(in_ch * 2, out_ch + 4, 4, 2, 1, bias=True)
        )

    def forward(self, f0, f1, embt):
        """Fuse both features with the embedding; split flows from features."""
        height, width = f0.shape[2:]
        embt_map = embt.repeat(1, 1, height, width)  # broadcast spatially
        out = self.convblock(torch.cat([f0, f1, embt_map], 1))
        flow0, flow1 = torch.chunk(out[:, :4, ...], 2, 1)
        return flow0, flow1, out[:, 4:, ...]
|
||||||
|
|
||||||
|
class IntermediateDecoder(nn.Module):
    """Mid-level decoder: refines the incoming flow pair at double resolution."""

    def __init__(self, in_ch, out_ch, skip_ch) -> None:
        super().__init__()
        # +4 input channels for the two incoming flows; +4 output channels
        # for the two refined flow fields.
        self.convblock = nn.Sequential(
            convrelu(in_ch * 3 + 4, in_ch * 3),
            ResBlock(in_ch * 3, skip_ch),
            nn.ConvTranspose2d(in_ch * 3, out_ch + 4, 4, 2, 1, bias=True)
        )

    def forward(self, ft_, f0, f1, flow0_in, flow1_in):
        """Warp both features by the incoming flows, decode, add upsampled flows."""
        warped0 = warp(f0, flow0_in)
        warped1 = warp(f1, flow1_in)
        out = self.convblock(
            torch.cat([ft_, warped0, warped1, flow0_in, flow1_in], 1))
        flow0, flow1 = torch.chunk(out[:, :4, ...], 2, 1)
        ft_ = out[:, 4:, ...]
        # Upsampled flows double in magnitude along with the resolution.
        flow0 = flow0 + 2.0 * resize(flow0_in, scale_factor=2.0)
        flow1 = flow1 + 2.0 * resize(flow1_in, scale_factor=2.0)
        return flow0, flow1, ft_
|
||||||
69
networks/blocks/multi_flow.py
Executable file
69
networks/blocks/multi_flow.py
Executable file
@@ -0,0 +1,69 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn as nn
|
||||||
|
from src.utils.flow_utils import warp
|
||||||
|
from networks.blocks.ifrnet import (
|
||||||
|
convrelu, resize,
|
||||||
|
ResBlock,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def multi_flow_combine(comb_block, img0, img1, flow0, flow1,
                       mask=None, img_res=None, mean=None):
    """Warp both frames with every flow field in parallel and fuse the results.

    Args:
        comb_block: nn.Sequential mapping the stacked per-flow predictions
            to a residual correction.
        img0, img1: input frames, shape [b, 3, h, w].
        flow0, flow1: stacked flow fields, shape [b, 2*num_flows, h, w].
        mask: optional fusion weights; when None the two warps are averaged
            equally.
        img_res: optional per-flow residual image; treated as zero when None.
        mean: optional per-image mean to restore; treated as zero when None.

    Returns:
        Fused intermediate-frame prediction, shape [b, 3, h, w].
    """
    b, c, h, w = flow0.shape
    num_flows = c // 2
    # Fold the flow dimension into the batch so all warps run in one call.
    flow0 = flow0.reshape(b, num_flows, 2, h, w).reshape(-1, 2, h, w)
    flow1 = flow1.reshape(b, num_flows, 2, h, w).reshape(-1, 2, h, w)

    mask = mask.reshape(b, num_flows, 1, h, w
                        ).reshape(-1, 1, h, w) if mask is not None else None
    img_res = img_res.reshape(b, num_flows, 3, h, w
                              ).reshape(-1, 3, h, w) if img_res is not None else 0
    img0 = torch.stack([img0] * num_flows, 1).reshape(-1, 3, h, w)
    img1 = torch.stack([img1] * num_flows, 1).reshape(-1, 3, h, w)
    mean = torch.stack([mean] * num_flows, 1).reshape(-1, 1, 1, 1
                                                      ) if mean is not None else 0

    img0_warp = warp(img0, flow0)
    img1_warp = warp(img1, flow1)
    if mask is not None:
        img_warps = mask * img0_warp + (1 - mask) * img1_warp + mean + img_res
    else:
        # BUGFIX: the documented "simple average" path previously crashed
        # with a TypeError (None used in arithmetic) when mask was None.
        img_warps = 0.5 * (img0_warp + img1_warp) + mean + img_res
    img_warps = img_warps.reshape(b, num_flows, 3, h, w)
    # Average across flows, plus a learned correction from the stacked warps.
    imgt_pred = img_warps.mean(1) + comb_block(img_warps.view(b, -1, h, w))
    return imgt_pred
|
||||||
|
|
||||||
|
|
||||||
|
class MultiFlowDecoder(nn.Module):
    """Finest decoder: predicts ``num_flows`` flow pairs, masks and residuals."""

    def __init__(self, in_ch, skip_ch, num_flows=3):
        super(MultiFlowDecoder, self).__init__()
        self.num_flows = num_flows
        # 8 outputs per flow: 2+2 flow channels, 1 mask, 3 residual image.
        self.convblock = nn.Sequential(
            convrelu(in_ch * 3 + 4, in_ch * 3),
            ResBlock(in_ch * 3, skip_ch),
            nn.ConvTranspose2d(in_ch * 3, 8 * num_flows, 4, 2, 1, bias=True)
        )

    def forward(self, ft_, f0, f1, flow0, flow1):
        """Refine the incoming flow pair into per-flow predictions."""
        n = self.num_flows
        warped0 = warp(f0, flow0)
        warped1 = warp(f1, flow1)
        out = self.convblock(
            torch.cat([ft_, warped0, warped1, flow0, flow1], 1))
        delta_flow0, delta_flow1, mask, img_res = torch.split(
            out, [2 * n, 2 * n, n, 3 * n], 1)
        mask = torch.sigmoid(mask)

        # Upsample the incoming flow (doubling its magnitude) and tile it
        # across all predicted flows before adding the deltas.
        base0 = 2.0 * resize(flow0, scale_factor=2.0).repeat(1, n, 1, 1)
        base1 = 2.0 * resize(flow1, scale_factor=2.0).repeat(1, n, 1, 1)
        return delta_flow0 + base0, delta_flow1 + base1, mask, img_res
|
||||||
207
networks/blocks/raft.py
Executable file
207
networks/blocks/raft.py
Executable file
@@ -0,0 +1,207 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn as nn
|
||||||
|
import torch.nn.functional as F
|
||||||
|
|
||||||
|
|
||||||
|
def resize(x, scale_factor):
    """Bilinearly rescale a NCHW tensor by ``scale_factor`` (corners not aligned)."""
    return F.interpolate(
        x, scale_factor=scale_factor, mode="bilinear", align_corners=False
    )
|
||||||
|
|
||||||
|
|
||||||
|
def bilinear_sampler(img, coords, mask=False):
    """Sample ``img`` at pixel coordinates ``coords`` via bilinear interpolation.

    Args:
        img: [N, C, H, W] tensor.
        coords: [N, H', W', 2] pixel coordinates in (x, y) order.
        mask: when True, also return a float mask of strictly in-bounds samples.
    """
    H, W = img.shape[-2:]
    xgrid, ygrid = coords.split([1, 1], dim=-1)
    # Convert pixel coordinates into grid_sample's [-1, 1] range.
    xgrid = 2 * xgrid / (W - 1) - 1
    ygrid = 2 * ygrid / (H - 1) - 1

    grid = torch.cat([xgrid, ygrid], dim=-1)
    sampled = F.grid_sample(img, grid, align_corners=True)

    if not mask:
        return sampled
    inbounds = (xgrid > -1) & (ygrid > -1) & (xgrid < 1) & (ygrid < 1)
    return sampled, inbounds.float()
|
||||||
|
|
||||||
|
|
||||||
|
def coords_grid(batch, ht, wd, device):
    """Return a [batch, 2, ht, wd] grid of (x, y) pixel coordinates."""
    ys, xs = torch.meshgrid(torch.arange(ht, device=device),
                            torch.arange(wd, device=device),
                            indexing='ij')
    # Stack x first, then y, matching flow-field channel order.
    grid = torch.stack((xs, ys), dim=0).float()
    return grid[None].repeat(batch, 1, 1, 1)
|
||||||
|
|
||||||
|
|
||||||
|
class SmallUpdateBlock(nn.Module):
    """Lightweight GRU-style update block over correlation and flow features.

    Produces a hidden-feature update and a flow update; when ``scale_factor``
    is set the body runs at a coarser resolution and the outputs are mapped
    back up.
    """

    def __init__(self, cdim, hidden_dim, flow_dim, corr_dim, fc_dim,
                 corr_levels=4, radius=3, scale_factor=None):
        super(SmallUpdateBlock, self).__init__()
        # Channel count of the (bidirectional, hence 2x) correlation volume.
        cor_planes = corr_levels * (2 * radius + 1) ** 2
        self.scale_factor = scale_factor

        self.convc1 = nn.Conv2d(2 * cor_planes, corr_dim, 1, padding=0)
        self.convf1 = nn.Conv2d(4, flow_dim * 2, 7, padding=3)
        self.convf2 = nn.Conv2d(flow_dim * 2, flow_dim, 3, padding=1)
        self.conv = nn.Conv2d(corr_dim + flow_dim, fc_dim, 3, padding=1)

        self.gru = nn.Sequential(
            nn.Conv2d(fc_dim + 4 + cdim, hidden_dim, 3, padding=1),
            nn.LeakyReLU(negative_slope=0.1, inplace=True),
            nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1),
        )

        self.feat_head = nn.Sequential(
            nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1),
            nn.LeakyReLU(negative_slope=0.1, inplace=True),
            nn.Conv2d(hidden_dim, cdim, 3, padding=1),
        )

        self.flow_head = nn.Sequential(
            nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1),
            nn.LeakyReLU(negative_slope=0.1, inplace=True),
            nn.Conv2d(hidden_dim, 4, 3, padding=1),
        )

        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, net, flow, corr):
        """Return ``(delta_net, delta_flow)`` from hidden state, flow, correlation."""
        # Bring the hidden state down to the working resolution if requested.
        if self.scale_factor is not None:
            net = resize(net, 1 / self.scale_factor)

        corr_feat = self.lrelu(self.convc1(corr))
        flow_feat = self.lrelu(self.convf2(self.lrelu(self.convf1(flow))))
        motion = self.lrelu(self.conv(torch.cat([corr_feat, flow_feat], dim=1)))

        gru_out = self.gru(torch.cat([motion, flow, net], dim=1))
        delta_net = self.feat_head(gru_out)
        delta_flow = self.flow_head(gru_out)

        # Upsample the updates back; flow magnitude scales with resolution.
        if self.scale_factor is not None:
            delta_net = resize(delta_net, scale_factor=self.scale_factor)
            delta_flow = self.scale_factor * resize(delta_flow, scale_factor=self.scale_factor)

        return delta_net, delta_flow
|
||||||
|
|
||||||
|
|
||||||
|
class BasicUpdateBlock(nn.Module):
    """GRU-style update block with a deeper correlation encoder.

    Produces a hidden-feature update and ``out_num`` flow updates; when
    ``scale_factor`` is set the body runs at a coarser resolution and the
    outputs are mapped back up.
    """

    def __init__(self, cdim, hidden_dim, flow_dim, corr_dim, corr_dim2,
                 fc_dim, corr_levels=4, radius=3, scale_factor=None, out_num=1):
        super(BasicUpdateBlock, self).__init__()
        # Channel count of the (bidirectional, hence 2x) correlation volume.
        cor_planes = corr_levels * (2 * radius + 1) ** 2

        self.scale_factor = scale_factor
        self.convc1 = nn.Conv2d(2 * cor_planes, corr_dim, 1, padding=0)
        self.convc2 = nn.Conv2d(corr_dim, corr_dim2, 3, padding=1)
        self.convf1 = nn.Conv2d(4, flow_dim * 2, 7, padding=3)
        self.convf2 = nn.Conv2d(flow_dim * 2, flow_dim, 3, padding=1)
        self.conv = nn.Conv2d(flow_dim + corr_dim2, fc_dim, 3, padding=1)

        self.gru = nn.Sequential(
            nn.Conv2d(fc_dim + 4 + cdim, hidden_dim, 3, padding=1),
            nn.LeakyReLU(negative_slope=0.1, inplace=True),
            nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1),
        )

        self.feat_head = nn.Sequential(
            nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1),
            nn.LeakyReLU(negative_slope=0.1, inplace=True),
            nn.Conv2d(hidden_dim, cdim, 3, padding=1),
        )

        self.flow_head = nn.Sequential(
            nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1),
            nn.LeakyReLU(negative_slope=0.1, inplace=True),
            nn.Conv2d(hidden_dim, 4 * out_num, 3, padding=1),
        )

        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, net, flow, corr):
        """Return ``(delta_net, delta_flow)`` from hidden state, flow, correlation."""
        # Bring the hidden state down to the working resolution if requested.
        if self.scale_factor is not None:
            net = resize(net, 1 / self.scale_factor)

        corr_feat = self.lrelu(self.convc2(self.lrelu(self.convc1(corr))))
        flow_feat = self.lrelu(self.convf2(self.lrelu(self.convf1(flow))))
        motion = self.lrelu(self.conv(torch.cat([corr_feat, flow_feat], dim=1)))

        gru_out = self.gru(torch.cat([motion, flow, net], dim=1))
        delta_net = self.feat_head(gru_out)
        delta_flow = self.flow_head(gru_out)

        # Upsample the updates back; flow magnitude scales with resolution.
        if self.scale_factor is not None:
            delta_net = resize(delta_net, scale_factor=self.scale_factor)
            delta_flow = self.scale_factor * resize(delta_flow, scale_factor=self.scale_factor)
        return delta_net, delta_flow
|
||||||
|
|
||||||
|
|
||||||
|
class BidirCorrBlock:
    """Bidirectional multi-scale correlation-volume lookup.

    Builds an all-pairs correlation pyramid between two feature maps and,
    when called, samples a (2r+1) x (2r+1) window around given coordinates
    at every pyramid level — in both matching directions at once.
    """

    def __init__(self, fmap1, fmap2, num_levels=4, radius=4):
        # fmap1 / fmap2: feature maps of shape (batch, dim, H, W);
        # corr() flattens both over the same H*W, so equal spatial size is
        # assumed — TODO confirm against the caller.
        self.num_levels = num_levels
        self.radius = radius
        self.corr_pyramid = []    # fmap1 -> fmap2 direction
        self.corr_pyramid_T = []  # fmap2 -> fmap1 direction

        corr = BidirCorrBlock.corr(fmap1, fmap2)
        batch, h1, w1, dim, h2, w2 = corr.shape
        # Transposed volume: swap the roles of the two feature maps.
        corr_T = corr.clone().permute(0, 4, 5, 3, 1, 2)

        # Fold source pixels into the batch axis so the pooling below acts
        # only on the target map's spatial dims.
        corr = corr.reshape(batch*h1*w1, dim, h2, w2)
        corr_T = corr_T.reshape(batch*h2*w2, dim, h1, w1)

        self.corr_pyramid.append(corr)
        self.corr_pyramid_T.append(corr_T)

        # Each subsequent level halves the target resolution by 2x2 average
        # pooling.
        for _ in range(self.num_levels-1):
            corr = F.avg_pool2d(corr, 2, stride=2)
            corr_T = F.avg_pool2d(corr_T, 2, stride=2)
            self.corr_pyramid.append(corr)
            self.corr_pyramid_T.append(corr_T)

    def __call__(self, coords0, coords1):
        """Sample both correlation pyramids around coords0 / coords1.

        Args:
            coords0, coords1: (batch, 2, H, W) pixel coordinates (the
                permute/reshape below fixes the channel dim at 2).

        Returns:
            Two (batch, C, H, W) tensors, one per direction; C is the
            per-level sample count concatenated over all levels
            (num_levels * (2r+1)^2, assuming bilinear_sampler yields one
            value per window offset — verify against its definition).
        """
        r = self.radius
        coords0 = coords0.permute(0, 2, 3, 1)
        coords1 = coords1.permute(0, 2, 3, 1)
        assert coords0.shape == coords1.shape, f"coords0 shape: [{coords0.shape}] is not equal to [{coords1.shape}]"
        batch, h1, w1, _ = coords0.shape

        out_pyramid = []
        out_pyramid_T = []
        for i in range(self.num_levels):
            corr = self.corr_pyramid[i]
            corr_T = self.corr_pyramid_T[i]

            # Integer offsets of a (2r+1) x (2r+1) window around each centroid.
            dx = torch.linspace(-r, r, 2*r+1, device=coords0.device)
            dy = torch.linspace(-r, r, 2*r+1, device=coords0.device)
            delta = torch.stack(torch.meshgrid(dy, dx, indexing='ij'), axis=-1)
            delta_lvl = delta.view(1, 2*r+1, 2*r+1, 2)

            # Coordinates shrink by a factor of 2 per pyramid level.
            centroid_lvl_0 = coords0.reshape(batch*h1*w1, 1, 1, 2) / 2**i
            centroid_lvl_1 = coords1.reshape(batch*h1*w1, 1, 1, 2) / 2**i
            coords_lvl_0 = centroid_lvl_0 + delta_lvl
            coords_lvl_1 = centroid_lvl_1 + delta_lvl

            corr = bilinear_sampler(corr, coords_lvl_0)
            corr_T = bilinear_sampler(corr_T, coords_lvl_1)
            corr = corr.view(batch, h1, w1, -1)
            corr_T = corr_T.view(batch, h1, w1, -1)
            out_pyramid.append(corr)
            out_pyramid_T.append(corr_T)

        # Concatenate all levels along the channel (last) dim, then move
        # channels to dim 1.
        out = torch.cat(out_pyramid, dim=-1)
        out_T = torch.cat(out_pyramid_T, dim=-1)
        return out.permute(0, 3, 1, 2).contiguous().float(), out_T.permute(0, 3, 1, 2).contiguous().float()

    @staticmethod
    def corr(fmap1, fmap2):
        """All-pairs dot-product correlation, scaled by 1/sqrt(dim).

        Returns a tensor of shape (batch, ht, wd, 1, ht, wd).
        """
        batch, dim, ht, wd = fmap1.shape
        fmap1 = fmap1.view(batch, dim, ht*wd)
        fmap2 = fmap2.view(batch, dim, ht*wd)

        corr = torch.matmul(fmap1.transpose(1,2), fmap2)
        corr = corr.view(batch, ht, wd, 1, ht, wd)
        return corr / torch.sqrt(torch.tensor(dim).float())
|
||||||
12
pyproject.toml
Normal file
12
pyproject.toml
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
[project]
|
||||||
|
name = "amt-apple"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Add your description here"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = [
|
||||||
|
"imageio>=2.37.3",
|
||||||
|
"numpy>=2.4.4",
|
||||||
|
"omegaconf>=2.3.0",
|
||||||
|
"torch>=2.11.0",
|
||||||
|
]
|
||||||
62
src/config/AMT-G.yaml
Executable file
62
src/config/AMT-G.yaml
Executable file
@@ -0,0 +1,62 @@
|
|||||||
|
exp_name: floloss1e-2_300epoch_bs24_lr1p5e-4
|
||||||
|
seed: 2023
|
||||||
|
epochs: 300
|
||||||
|
distributed: true
|
||||||
|
lr: 1.5e-4
|
||||||
|
lr_min: 2e-5
|
||||||
|
weight_decay: 0.0
|
||||||
|
resume_state: null
|
||||||
|
save_dir: work_dir
|
||||||
|
eval_interval: 1
|
||||||
|
|
||||||
|
network:
|
||||||
|
name: networks.AMT-G.Model
|
||||||
|
params:
|
||||||
|
corr_radius: 3
|
||||||
|
corr_lvls: 4
|
||||||
|
num_flows: 5
|
||||||
|
data:
|
||||||
|
train:
|
||||||
|
name: datasets.vimeo_datasets.Vimeo90K_Train_Dataset
|
||||||
|
params:
|
||||||
|
dataset_dir: data/vimeo_triplet
|
||||||
|
val:
|
||||||
|
name: datasets.vimeo_datasets.Vimeo90K_Test_Dataset
|
||||||
|
params:
|
||||||
|
dataset_dir: data/vimeo_triplet
|
||||||
|
train_loader:
|
||||||
|
batch_size: 24
|
||||||
|
num_workers: 12
|
||||||
|
val_loader:
|
||||||
|
batch_size: 24
|
||||||
|
num_workers: 3
|
||||||
|
|
||||||
|
logger:
|
||||||
|
use_wandb: true
|
||||||
|
resume_id: null
|
||||||
|
|
||||||
|
losses:
|
||||||
|
- {
|
||||||
|
name: losses.loss.CharbonnierLoss,
|
||||||
|
nickname: l_rec,
|
||||||
|
params: {
|
||||||
|
loss_weight: 1.0,
|
||||||
|
keys: [imgt_pred, imgt]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
name: losses.loss.TernaryLoss,
|
||||||
|
nickname: l_ter,
|
||||||
|
params: {
|
||||||
|
loss_weight: 1.0,
|
||||||
|
keys: [imgt_pred, imgt]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
name: losses.loss.MultipleFlowLoss,
|
||||||
|
nickname: l_flo,
|
||||||
|
params: {
|
||||||
|
loss_weight: 0.005,
|
||||||
|
keys: [flow0_pred, flow1_pred, flow]
|
||||||
|
}
|
||||||
|
}
|
||||||
63
src/config/AMT-S.yaml
Executable file
63
src/config/AMT-S.yaml
Executable file
@@ -0,0 +1,63 @@
|
|||||||
|
exp_name: floloss1e-2_300epoch_bs24_lr2e-4
|
||||||
|
seed: 2023
|
||||||
|
epochs: 300
|
||||||
|
distributed: true
|
||||||
|
lr: 2e-4
|
||||||
|
lr_min: 2e-5
|
||||||
|
weight_decay: 0.0
|
||||||
|
resume_state: null
|
||||||
|
save_dir: work_dir
|
||||||
|
eval_interval: 1
|
||||||
|
|
||||||
|
network:
|
||||||
|
name: networks.AMT-S.Model
|
||||||
|
params:
|
||||||
|
corr_radius: 3
|
||||||
|
corr_lvls: 4
|
||||||
|
num_flows: 3
|
||||||
|
|
||||||
|
data:
|
||||||
|
train:
|
||||||
|
name: datasets.vimeo_datasets.Vimeo90K_Train_Dataset
|
||||||
|
params:
|
||||||
|
dataset_dir: data/vimeo_triplet
|
||||||
|
val:
|
||||||
|
name: datasets.vimeo_datasets.Vimeo90K_Test_Dataset
|
||||||
|
params:
|
||||||
|
dataset_dir: data/vimeo_triplet
|
||||||
|
train_loader:
|
||||||
|
batch_size: 24
|
||||||
|
num_workers: 12
|
||||||
|
val_loader:
|
||||||
|
batch_size: 24
|
||||||
|
num_workers: 3
|
||||||
|
|
||||||
|
logger:
|
||||||
|
use_wandb: false
|
||||||
|
resume_id: null
|
||||||
|
|
||||||
|
losses:
|
||||||
|
- {
|
||||||
|
name: losses.loss.CharbonnierLoss,
|
||||||
|
nickname: l_rec,
|
||||||
|
params: {
|
||||||
|
loss_weight: 1.0,
|
||||||
|
keys: [imgt_pred, imgt]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
name: losses.loss.TernaryLoss,
|
||||||
|
nickname: l_ter,
|
||||||
|
params: {
|
||||||
|
loss_weight: 1.0,
|
||||||
|
keys: [imgt_pred, imgt]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
name: losses.loss.MultipleFlowLoss,
|
||||||
|
nickname: l_flo,
|
||||||
|
params: {
|
||||||
|
loss_weight: 0.002,
|
||||||
|
keys: [flow0_pred, flow1_pred, flow]
|
||||||
|
}
|
||||||
|
}
|
||||||
BIN
src/pretrained/amt-g.pth
Normal file
BIN
src/pretrained/amt-g.pth
Normal file
Binary file not shown.
15
src/utils/build.py
Normal file
15
src/utils/build.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
from typing import TYPE_CHECKING
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from omegaconf import DictConfig
|
||||||
|
|
||||||
|
|
||||||
|
def base_build_fn(module: str, cls: str, params: dict):
    """Import *module*, look up attribute *cls* in it, and call it with *params*."""
    mod = importlib.import_module(module, package=None)
    factory = getattr(mod, cls)
    return factory(**params)


def build_from_cfg(config: "DictConfig"):
    """Instantiate the object described by *config*.

    *config* carries a dotted ``name`` ("pkg.mod.Class") and an optional
    ``params`` mapping forwarded as keyword arguments.
    """
    module_path, class_name = config["name"].rsplit(".", 1)
    kwargs: dict = config.get("params", {})
    return base_build_fn(module_path, class_name, kwargs)
|
||||||
25
src/utils/flow_utils.py
Normal file
25
src/utils/flow_utils.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
import torch
|
||||||
|
import torch.nn.functional as F
|
||||||
|
|
||||||
|
|
||||||
|
def warp(img, flow):
    """Backward-warp *img* by *flow* (pixel displacements) with bilinear sampling.

    Out-of-bounds samples are clamped to the border.
    """
    B, _, H, W = flow.shape

    # Identity sampling grid in normalized [-1, 1] coordinates.
    xs = torch.linspace(-1.0, 1.0, W).view(1, 1, 1, W).expand(B, -1, H, -1)
    ys = torch.linspace(-1.0, 1.0, H).view(1, 1, H, 1).expand(B, -1, -1, W)
    base_grid = torch.cat([xs, ys], 1).to(img)

    # Convert the pixel-space flow into the same normalized coordinates.
    norm_flow = torch.cat(
        [
            flow[:, 0:1, :, :] / ((W - 1.0) / 2.0),
            flow[:, 1:2, :, :] / ((H - 1.0) / 2.0),
        ],
        1,
    )

    sample_grid = (base_grid + norm_flow).permute(0, 2, 3, 1)
    return F.grid_sample(
        input=img,
        grid=sample_grid,
        mode="bilinear",
        padding_mode="border",
        align_corners=True,
    )
|
||||||
35
src/utils/padder.py
Normal file
35
src/utils/padder.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import torch.nn.functional as F
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
import torch
|
||||||
|
|
||||||
|
|
||||||
|
class InputPadder:
    """Symmetrically pads images so H and W become multiples of *divisor*."""

    def __init__(self, dims: "torch.Size", divisor=16):
        self.ht, self.wd = dims[-2:]
        # Amount needed to round each spatial dim up to the next multiple.
        extra_h = (((self.ht // divisor) + 1) * divisor - self.ht) % divisor
        extra_w = (((self.wd // divisor) + 1) * divisor - self.wd) % divisor
        # [left, right, top, bottom] — the order F.pad expects.
        self._pad = [
            extra_w // 2,
            extra_w - extra_w // 2,
            extra_h // 2,
            extra_h - extra_h // 2,
        ]

    def pad(self, *inputs: "torch.Tensor"):
        """Replicate-pad the given tensor(s); a single input returns a tensor."""
        padded = [F.pad(x, self._pad, mode="replicate") for x in inputs]
        return padded[0] if len(inputs) == 1 else padded

    def unpad(self, *inputs: "torch.Tensor"):
        """Crop the padding off each tensor; always returns a list."""
        return [self._unpad(x) for x in inputs]

    def _unpad(self, x: "torch.Tensor"):
        ht, wd = x.shape[-2:]
        top, bottom = self._pad[2], ht - self._pad[3]
        left, right = self._pad[0], wd - self._pad[1]
        return x[..., top:bottom, left:right]
|
||||||
56
src/utils/torch.py
Normal file
56
src/utils/torch.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
import torch
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
|
||||||
|
def tensor2img(tensor: torch.Tensor):
    """Convert a (1, C, H, W) tensor in [0, 1] to an (H, W, C) uint8 array."""
    scaled = (tensor * 255.0).detach().squeeze(0)
    hwc = scaled.permute(1, 2, 0).cpu().numpy()
    return hwc.clip(0, 255).astype(np.uint8)
|
||||||
|
|
||||||
|
|
||||||
|
def img2tensor(img: np.ndarray) -> torch.Tensor:
    """Convert an (H, W, C) image in [0, 255] to a (1, C, H, W) tensor in [0, 1]."""
    logging.debug(f"Converting image of shape {img.shape} to tensor")
    # Drop any alpha (or extra) channels beyond RGB.
    if img.shape[-1] > 3:
        img = img[:, :, :3]
    chw = torch.tensor(img).permute(2, 0, 1)
    return chw.unsqueeze(0) / 255.0
|
||||||
|
|
||||||
|
|
||||||
|
def check_dim_and_resize(*args: torch.Tensor) -> list[torch.Tensor]:
    """Ensure all (N, C, H, W) tensors share one spatial size.

    When spatial sizes differ, every tensor is bilinearly resized to the
    first tensor's (H, W); otherwise the inputs are returned unchanged.
    Always returns a list.
    """
    logging.debug("Checking dimensions of input tensors")
    shape_list = []
    for t in args:
        logging.debug(f"Tensor shape: {t.shape}")
        shape_list.append(t.shape[2:])
    result = list(args)

    if len(set(shape_list)) > 1:
        logging.warning(
            "Inconsistent tensor shapes detected. Resizing tensors to the same shape."
        )
        # The first tensor's spatial size wins.
        desired_shape = shape_list[0]
        logging.info(
            f"Inconsistent size of input video frames. All frames will be resized to {desired_shape}"
        )
        result = [
            torch.nn.functional.interpolate(
                input=t,
                size=tuple(desired_shape),
                mode="bilinear",
            )
            for t in args
        ]

    return result
|
||||||
199
src/utils/utils.py
Normal file
199
src/utils/utils.py
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from imageio import imread, imwrite
|
||||||
|
|
||||||
|
|
||||||
|
def read(file: Path) -> np.ndarray:
    """Read *file* with the reader matching its extension.

    Raises:
        Exception: if the extension is not one of the supported formats.
    """
    ext = file.suffix.lower()
    if ext == ".float3":
        reader = readFloat
    elif ext == ".flo":
        reader = readFlow
    elif ext in (".ppm", ".pgm", ".png", ".jpg"):
        reader = readImage
    elif ext == ".pfm":
        # PFM readers also return the scale; only the data is wanted here.
        reader = lambda f: readPFM(f)[0]
    else:
        raise Exception("don't know how to read %s" % file)
    return reader(file)
|
||||||
|
|
||||||
|
|
||||||
|
def write(file: Path, data: np.ndarray) -> None:
    """Write *data* to *file* with the writer matching its extension.

    Raises:
        Exception: if the extension is not one of the supported formats.
    """
    ext = file.suffix.lower()
    if ext == ".float3":
        writer = writeFloat
    elif ext == ".flo":
        writer = writeFlow
    elif ext in (".ppm", ".pgm", ".png", ".jpg"):
        writer = writeImage
    elif ext == ".pfm":
        writer = writePFM
    else:
        raise Exception("don't know how to write %s" % file)
    return writer(file, data)
|
||||||
|
|
||||||
|
|
||||||
|
def readPFM(file: Path):
    """Read a PFM image.

    Returns:
        (data, scale): *data* is an (H, W) or (H, W, 3) float array with the
        PFM's bottom-up row order flipped to top-down; *scale* is the file's
        absolute scale factor.

    Raises:
        Exception: if the header magic or dimension line is malformed.
    """
    # Use a context manager so the handle is closed even when parsing fails
    # (the original leaked the file object on every exit path).
    with open(file, "rb") as data:
        header = data.readline().rstrip()
        if header.decode("ascii") == "PF":
            color = True  # three channels
        elif header.decode("ascii") == "Pf":
            color = False  # single channel
        else:
            raise Exception("Not a PFM file.")

        dim_match = re.match(r"^(\d+)\s(\d+)\s$", data.readline().decode("ascii"))
        if dim_match:
            width, height = list(map(int, dim_match.groups()))
        else:
            raise Exception("Malformed PFM header.")

        # A negative scale marks little-endian sample data; its sign is only
        # the endianness flag, so report the absolute value.
        scale = float(data.readline().decode("ascii").rstrip())
        if scale < 0:
            endian = "<"
            scale = -scale
        else:
            endian = ">"

        result = np.fromfile(data, endian + "f")

    shape = (height, width, 3) if color else (height, width)
    result = np.reshape(result, shape)
    # PFM stores rows bottom-to-top; flip to conventional top-down order.
    result = np.flipud(result)
    return result, scale
|
||||||
|
|
||||||
|
|
||||||
|
def writePFM(file: Path, image: np.ndarray, scale=1):
    """Write *image* (float32; H x W, H x W x 1, or H x W x 3) as a PFM file.

    Raises:
        Exception: if the dtype is not float32 or the shape is unsupported.
    """
    if image.dtype.name != "float32":
        raise Exception("Image dtype must be float32.")

    # PFM stores rows bottom-to-top.
    image = np.flipud(image)

    if len(image.shape) == 3 and image.shape[2] == 3:
        color = True
    elif len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1:
        color = False
    else:
        raise Exception("Image must have H x W x 3, H x W x 1 or H x W dimensions.")

    # Open only after validation (the original truncated the target file
    # before checking dtype/shape) and close it deterministically.
    with open(file, "wb") as data:
        # BUG FIX: the original wrote ("PF\n" if color else "Pf\n".encode()),
        # which encodes only the grayscale branch — writing a color image
        # passed a str to a binary file and raised TypeError. Encode the
        # selected header instead.
        data.write(("PF\n" if color else "Pf\n").encode())
        data.write("%d %d\n".encode() % (image.shape[1], image.shape[0]))

        # A negative scale marks the payload as little-endian.
        endian = image.dtype.byteorder
        if endian == "<" or endian == "=" and sys.byteorder == "little":
            scale = -scale

        data.write("%f\n".encode() % scale)
        image.tofile(data)
|
||||||
|
|
||||||
|
|
||||||
|
def readFlow(file: Path):
    """Read a Middlebury .flo optical-flow file as an (H, W, 2) float32 array.

    A .pfm file is read through readPFM, keeping only its first two channels.

    Raises:
        Exception: if the .flo magic header "PIEH" is missing.
    """
    if file.suffix.lower() == ".pfm":
        return readPFM(file)[0][:, :, 0:2]

    # Context manager so the handle is closed even on a header error
    # (the original never closed the file).
    with open(file, "rb") as f:
        header = f.read(4)
        if header.decode("utf-8") != "PIEH":
            raise Exception("Flow file header does not contain PIEH")

        width = np.fromfile(f, np.int32, 1).squeeze()
        height = np.fromfile(f, np.int32, 1).squeeze()
        flow = np.fromfile(f, np.float32, width * height * 2).reshape((height, width, 2))

    return flow.astype(np.float32)
|
||||||
|
|
||||||
|
|
||||||
|
def readImage(file: Path):
    """Read an image; .pfm files keep at most their first three channels."""
    if file.suffix.lower() != ".pfm":
        return imread(file)
    data = readPFM(file)[0]
    # Trim a 3-D PFM to its first three planes; 2-D data is returned as-is.
    return data[:, :, 0:3] if len(data.shape) == 3 else data
|
||||||
|
|
||||||
|
|
||||||
|
def writeImage(file: Path, data: np.ndarray):
    """Write an image; .pfm goes through writePFM, everything else via imageio."""
    is_pfm = file.suffix.lower() == ".pfm"
    if not is_pfm:
        return imwrite(file, data)
    return writePFM(file, data, 1)
|
||||||
|
|
||||||
|
|
||||||
|
def writeFlow(file: Path, flow: np.ndarray):
    """Write *flow* (H, W, 2) in Middlebury .flo format.

    Layout: "PIEH" magic, width and height as int32, then float32 samples.
    """
    # Context manager so the handle is flushed and closed deterministically
    # (the original never closed the file).
    with open(file, "wb") as f:
        f.write("PIEH".encode("utf-8"))
        np.array([flow.shape[1], flow.shape[0]], dtype=np.int32).tofile(f)
        flow.astype(np.float32).tofile(f)
|
||||||
|
|
||||||
|
|
||||||
|
def readFloat(file: Path):
    """Read a .float3 file: "float\\n", ndim, one size per dim, then raw float32.

    Raises:
        Exception: if the "float" keyword header is missing.
    """
    # Context manager so the handle is closed on every exit path
    # (the original leaked it).
    with open(file, "rb") as f:
        if (f.readline().decode("utf-8")) != "float\n":
            raise Exception("float file %s did not contain <float> keyword" % file)

        dim = int(f.readline())

        dims = []
        count = 1
        for _ in range(0, dim):
            d = int(f.readline())
            dims.append(d)
            count *= d

        # Sizes are stored innermost-first; reverse to get the array shape.
        dims = list(reversed(dims))

        data = np.fromfile(f, np.float32, count).reshape(dims)

    if dim > 2:
        # NOTE(review): the two transposes rearrange the 3-D payload —
        # presumably channel-first on disk back to (H, W, C), mirroring
        # writeFloat's (2, 0, 1) transpose; confirm with a round-trip.
        data = np.transpose(data, (2, 1, 0))
        data = np.transpose(data, (1, 0, 2))

    return data
|
||||||
|
|
||||||
|
|
||||||
|
def writeFloat(file: Path, data: np.ndarray):
    """Write *data* (up to 3 dims) in the .float3 format readFloat expects.

    Raises:
        Exception: if data has more than 3 dimensions.
    """
    dim = len(data.shape)
    if dim > 3:
        raise Exception("bad float file dimension: %d" % dim)

    # Open after validation and close deterministically (the original leaked
    # the handle and truncated the file even for bad input).
    with open(file, "wb") as f:
        f.write(("float\n").encode("ascii"))
        f.write(("%d\n" % dim).encode("ascii"))

        # Sizes are written innermost-first (W before H), matching readFloat.
        if dim == 1:
            f.write(("%d\n" % data.shape[0]).encode("ascii"))
        else:
            f.write(("%d\n" % data.shape[1]).encode("ascii"))
            f.write(("%d\n" % data.shape[0]).encode("ascii"))
            for i in range(2, dim):
                f.write(("%d\n" % data.shape[i]).encode("ascii"))

        data = data.astype(np.float32)
        # BUG FIX: the original wrote the payload with
        # `if dim == 2: ... else: np.transpose(data, (2, 0, 1))`, so a 1-D
        # array (explicitly supported by the header branch above) crashed in
        # the transpose. Write 1-D and 2-D data directly.
        if dim <= 2:
            data.tofile(f)
        else:
            np.transpose(data, (2, 0, 1)).tofile(f)
|
||||||
649
uv.lock
generated
Normal file
649
uv.lock
generated
Normal file
@@ -0,0 +1,649 @@
|
|||||||
|
version = 1
|
||||||
|
revision = 1
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "amt-apple"
|
||||||
|
version = "0.1.0"
|
||||||
|
source = { virtual = "." }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "imageio" },
|
||||||
|
{ name = "numpy" },
|
||||||
|
{ name = "omegaconf" },
|
||||||
|
{ name = "torch" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.metadata]
|
||||||
|
requires-dist = [
|
||||||
|
{ name = "imageio", specifier = ">=2.37.3" },
|
||||||
|
{ name = "numpy", specifier = ">=2.4.4" },
|
||||||
|
{ name = "omegaconf", specifier = ">=2.3.0" },
|
||||||
|
{ name = "torch", specifier = ">=2.11.0" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "antlr4-python3-runtime"
|
||||||
|
version = "4.9.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b", size = 117034 }
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cuda-bindings"
|
||||||
|
version = "13.2.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "cuda-pathfinder" },
|
||||||
|
]
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/52/c8/b2589d68acf7e3d63e2be330b84bc25712e97ed799affbca7edd7eae25d6/cuda_bindings-13.2.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e865447abfb83d6a98ad5130ed3c70b1fc295ae3eeee39fd07b4ddb0671b6788", size = 5722404 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1f/92/f899f7bbb5617bb65ec52a6eac1e9a1447a86b916c4194f8a5001b8cde0c/cuda_bindings-13.2.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:46d8776a55d6d5da9dd6e9858fba2efcda2abe6743871dee47dd06eb8cb6d955", size = 6320619 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/df/93/eef988860a3ca985f82c4f3174fc0cdd94e07331ba9a92e8e064c260337f/cuda_bindings-13.2.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6629ca2df6f795b784752409bcaedbd22a7a651b74b56a165ebc0c9dcbd504d0", size = 5614610 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/18/23/6db3aba46864aee357ab2415135b3fe3da7e9f1fa0221fa2a86a5968099c/cuda_bindings-13.2.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7dca0da053d3b4cc4869eff49c61c03f3c5dbaa0bcd712317a358d5b8f3f385d", size = 6149914 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c0/87/87a014f045b77c6de5c8527b0757fe644417b184e5367db977236a141602/cuda_bindings-13.2.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a6464b30f46692d6c7f65d4a0e0450d81dd29de3afc1bb515653973d01c2cd6e", size = 5685673 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ee/5e/c0fe77a73aaefd3fff25ffaccaac69c5a63eafdf8b9a4c476626ef0ac703/cuda_bindings-13.2.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4af9f3e1be603fa12d5ad6cfca7844c9d230befa9792b5abdf7dd79979c3626", size = 6191386 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5f/58/ed2c3b39c8dd5f96aa7a4abef0d47a73932c7a988e30f5fa428f00ed0da1/cuda_bindings-13.2.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df850a1ff8ce1b3385257b08e47b70e959932f5f432d0a4e46a355962b4e4771", size = 5507469 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1f/01/0c941b112ceeb21439b05895eace78ca1aa2eaaf695c8521a068fd9b4c00/cuda_bindings-13.2.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8a16384c6494e5485f39314b0b4afb04bee48d49edb16d5d8593fd35bbd231b", size = 6059693 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cuda-pathfinder"
|
||||||
|
version = "1.5.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/93/66/0c02bd330e7d976f83fa68583d6198d76f23581bcbb5c0e98a6148f326e5/cuda_pathfinder-1.5.0-py3-none-any.whl", hash = "sha256:498f90a9e9de36044a7924742aecce11c50c49f735f1bc53e05aa46de9ea4110", size = 49739 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cuda-toolkit"
|
||||||
|
version = "13.0.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/57/b2/453099f5f3b698d7d0eab38916aac44c7f76229f451709e2eb9db6615dcd/cuda_toolkit-13.0.2-py2.py3-none-any.whl", hash = "sha256:b198824cf2f54003f50d64ada3a0f184b42ca0846c1c94192fa269ecd97a66eb", size = 2364 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.optional-dependencies]
|
||||||
|
cublas = [
|
||||||
|
{ name = "nvidia-cublas", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
cudart = [
|
||||||
|
{ name = "nvidia-cuda-runtime", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
cufft = [
|
||||||
|
{ name = "nvidia-cufft", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
cufile = [
|
||||||
|
{ name = "nvidia-cufile", marker = "sys_platform == 'linux'" },
|
||||||
|
]
|
||||||
|
cupti = [
|
||||||
|
{ name = "nvidia-cuda-cupti", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
curand = [
|
||||||
|
{ name = "nvidia-curand", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
cusolver = [
|
||||||
|
{ name = "nvidia-cusolver", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
cusparse = [
|
||||||
|
{ name = "nvidia-cusparse", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
nvjitlink = [
|
||||||
|
{ name = "nvidia-nvjitlink", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
nvrtc = [
|
||||||
|
{ name = "nvidia-cuda-nvrtc", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
nvtx = [
|
||||||
|
{ name = "nvidia-nvtx", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "filelock"
|
||||||
|
version = "3.25.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/94/b8/00651a0f559862f3bb7d6f7477b192afe3f583cc5e26403b44e59a55ab34/filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694", size = 40480 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a4/a5/842ae8f0c08b61d6484b52f99a03510a3a72d23141942d216ebe81fefbce/filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70", size = 26759 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fsspec"
|
||||||
|
version = "2026.3.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/e1/cf/b50ddf667c15276a9ab15a70ef5f257564de271957933ffea49d2cdbcdfb/fsspec-2026.3.0.tar.gz", hash = "sha256:1ee6a0e28677557f8c2f994e3eea77db6392b4de9cd1f5d7a9e87a0ae9d01b41", size = 313547 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d5/1f/5f4a3cd9e4440e9d9bc78ad0a91a1c8d46b4d429d5239ebe6793c9fe5c41/fsspec-2026.3.0-py3-none-any.whl", hash = "sha256:d2ceafaad1b3457968ed14efa28798162f1638dbb5d2a6868a2db002a5ee39a4", size = 202595 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "imageio"
|
||||||
|
version = "2.37.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "numpy" },
|
||||||
|
{ name = "pillow" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b1/84/93bcd1300216ea50811cee96873b84a1bebf8d0489ffaf7f2a3756bab866/imageio-2.37.3.tar.gz", hash = "sha256:bbb37efbfc4c400fcd534b367b91fcd66d5da639aaa138034431a1c5e0a41451", size = 389673 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/49/fa/391e437a34e55095173dca5f24070d89cbc233ff85bf1c29c93248c6588d/imageio-2.37.3-py3-none-any.whl", hash = "sha256:46f5bb8522cd421c0f5ae104d8268f569d856b29eb1a13b92829d1970f32c9f0", size = 317646 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "jinja2"
|
||||||
|
version = "3.1.6"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "markupsafe" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "markupsafe"
|
||||||
|
version = "3.0.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "mpmath"
|
||||||
|
version = "1.3.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "networkx"
|
||||||
|
version = "3.6.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "numpy"
|
||||||
|
version = "2.4.4"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/d7/9f/b8cef5bffa569759033adda9481211426f12f53299629b410340795c2514/numpy-2.4.4.tar.gz", hash = "sha256:2d390634c5182175533585cc89f3608a4682ccb173cc9bb940b2881c8d6f8fa0", size = 20731587 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/28/05/32396bec30fb2263770ee910142f49c1476d08e8ad41abf8403806b520ce/numpy-2.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15716cfef24d3a9762e3acdf87e27f58dc823d1348f765bbea6bef8c639bfa1b", size = 16689272 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c5/f3/a983d28637bfcd763a9c7aafdb6d5c0ebf3d487d1e1459ffdb57e2f01117/numpy-2.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23cbfd4c17357c81021f21540da84ee282b9c8fba38a03b7b9d09ba6b951421e", size = 14699573 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9b/fd/e5ecca1e78c05106d98028114f5c00d3eddb41207686b2b7de3e477b0e22/numpy-2.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b3b60bb7cba2c8c81837661c488637eee696f59a877788a396d33150c35d842", size = 5204782 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/de/2f/702a4594413c1a8632092beae8aba00f1d67947389369b3777aed783fdca/numpy-2.4.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e4a010c27ff6f210ff4c6ef34394cd61470d01014439b192ec22552ee867f2a8", size = 6552038 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7f/37/eed308a8f56cba4d1fdf467a4fc67ef4ff4bf1c888f5fc980481890104b1/numpy-2.4.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9e75681b59ddaa5e659898085ae0eaea229d054f2ac0c7e563a62205a700121", size = 15670666 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0a/0d/0e3ecece05b7a7e87ab9fb587855548da437a061326fff64a223b6dcb78a/numpy-2.4.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81f4a14bee47aec54f883e0cad2d73986640c1590eb9bfaaba7ad17394481e6e", size = 16645480 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/34/49/f2312c154b82a286758ee2f1743336d50651f8b5195db18cdb63675ff649/numpy-2.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:62d6b0f03b694173f9fcb1fb317f7222fd0b0b103e784c6549f5e53a27718c44", size = 17020036 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7b/e9/736d17bd77f1b0ec4f9901aaec129c00d59f5d84d5e79bba540ef12c2330/numpy-2.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbc356aae7adf9e6336d336b9c8111d390a05df88f1805573ebb0807bd06fd1d", size = 18368643 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/63/f6/d417977c5f519b17c8a5c3bc9e8304b0908b0e21136fe43bf628a1343914/numpy-2.4.4-cp312-cp312-win32.whl", hash = "sha256:0d35aea54ad1d420c812bfa0385c71cd7cc5bcf7c65fed95fc2cd02fe8c79827", size = 5961117 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2d/5b/e1deebf88ff431b01b7406ca3583ab2bbb90972bbe1c568732e49c844f7e/numpy-2.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5f0362dc928a6ecd9db58868fca5e48485205e3855957bdedea308f8672ea4a", size = 12320584 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/58/89/e4e856ac82a68c3ed64486a544977d0e7bdd18b8da75b78a577ca31c4395/numpy-2.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:846300f379b5b12cc769334464656bc882e0735d27d9726568bc932fdc49d5ec", size = 10221450 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/14/1d/d0a583ce4fefcc3308806a749a536c201ed6b5ad6e1322e227ee4848979d/numpy-2.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:08f2e31ed5e6f04b118e49821397f12767934cfdd12a1ce86a058f91e004ee50", size = 16684933 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c1/62/2b7a48fbb745d344742c0277f01286dead15f3f68e4f359fbfcf7b48f70f/numpy-2.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e823b8b6edc81e747526f70f71a9c0a07ac4e7ad13020aa736bb7c9d67196115", size = 14694532 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e5/87/499737bfba066b4a3bebff24a8f1c5b2dee410b209bc6668c9be692580f0/numpy-2.4.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4a19d9dba1a76618dd86b164d608566f393f8ec6ac7c44f0cc879011c45e65af", size = 5199661 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cd/da/464d551604320d1491bc345efed99b4b7034143a85787aab78d5691d5a0e/numpy-2.4.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d2a8490669bfe99a233298348acc2d824d496dee0e66e31b66a6022c2ad74a5c", size = 6547539 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7d/90/8d23e3b0dafd024bf31bdec225b3bb5c2dbfa6912f8a53b8659f21216cbf/numpy-2.4.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45dbed2ab436a9e826e302fcdcbe9133f9b0006e5af7168afb8963a6520da103", size = 15668806 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d1/73/a9d864e42a01896bb5974475438f16086be9ba1f0d19d0bb7a07427c4a8b/numpy-2.4.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c901b15172510173f5cb310eae652908340f8dede90fff9e3bf6c0d8dfd92f83", size = 16632682 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/34/fb/14570d65c3bde4e202a031210475ae9cde9b7686a2e7dc97ee67d2833b35/numpy-2.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:99d838547ace2c4aace6c4f76e879ddfe02bb58a80c1549928477862b7a6d6ed", size = 17019810 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8a/77/2ba9d87081fd41f6d640c83f26fb7351e536b7ce6dd9061b6af5904e8e46/numpy-2.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0aec54fd785890ecca25a6003fd9a5aed47ad607bbac5cd64f836ad8666f4959", size = 18357394 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a2/23/52666c9a41708b0853fa3b1a12c90da38c507a3074883823126d4e9d5b30/numpy-2.4.4-cp313-cp313-win32.whl", hash = "sha256:07077278157d02f65c43b1b26a3886bce886f95d20aabd11f87932750dfb14ed", size = 5959556 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/57/fb/48649b4971cde70d817cf97a2a2fdc0b4d8308569f1dd2f2611959d2e0cf/numpy-2.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:5c70f1cc1c4efbe316a572e2d8b9b9cc44e89b95f79ca3331553fbb63716e2bf", size = 12317311 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ba/d8/11490cddd564eb4de97b4579ef6bfe6a736cc07e94c1598590ae25415e01/numpy-2.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:ef4059d6e5152fa1a39f888e344c73fdc926e1b2dd58c771d67b0acfbf2aa67d", size = 10222060 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/99/5d/dab4339177a905aad3e2221c915b35202f1ec30d750dd2e5e9d9a72b804b/numpy-2.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4bbc7f303d125971f60ec0aaad5e12c62d0d2c925f0ab1273debd0e4ba37aba5", size = 14822302 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/eb/e4/0564a65e7d3d97562ed6f9b0fd0fb0a6f559ee444092f105938b50043876/numpy-2.4.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:4d6d57903571f86180eb98f8f0c839fa9ebbfb031356d87f1361be91e433f5b7", size = 5327407 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/29/8d/35a3a6ce5ad371afa58b4700f1c820f8f279948cca32524e0a695b0ded83/numpy-2.4.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:4636de7fd195197b7535f231b5de9e4b36d2c440b6e566d2e4e4746e6af0ca93", size = 6647631 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f4/da/477731acbd5a58a946c736edfdabb2ac5b34c3d08d1ba1a7b437fa0884df/numpy-2.4.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad2e2ef14e0b04e544ea2fa0a36463f847f113d314aa02e5b402fdf910ef309e", size = 15727691 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e6/db/338535d9b152beabeb511579598418ba0212ce77cf9718edd70262cc4370/numpy-2.4.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a285b3b96f951841799528cd1f4f01cd70e7e0204b4abebac9463eecfcf2a40", size = 16681241 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e2/a9/ad248e8f58beb7a0219b413c9c7d8151c5d285f7f946c3e26695bdbbe2df/numpy-2.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f8474c4241bc18b750be2abea9d7a9ec84f46ef861dbacf86a4f6e043401f79e", size = 17085767 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b5/1a/3b88ccd3694681356f70da841630e4725a7264d6a885c8d442a697e1146b/numpy-2.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4e874c976154687c1f71715b034739b45c7711bec81db01914770373d125e392", size = 18403169 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c2/c9/fcfd5d0639222c6eac7f304829b04892ef51c96a75d479214d77e3ce6e33/numpy-2.4.4-cp313-cp313t-win32.whl", hash = "sha256:9c585a1790d5436a5374bac930dad6ed244c046ed91b2b2a3634eb2971d21008", size = 6083477 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d5/e3/3938a61d1c538aaec8ed6fd6323f57b0c2d2d2219512434c5c878db76553/numpy-2.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:93e15038125dc1e5345d9b5b68aa7f996ec33b98118d18c6ca0d0b7d6198b7e8", size = 12457487 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/97/6a/7e345032cc60501721ef94e0e30b60f6b0bd601f9174ebd36389a2b86d40/numpy-2.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:0dfd3f9d3adbe2920b68b5cd3d51444e13a10792ec7154cd0a2f6e74d4ab3233", size = 10292002 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6e/06/c54062f85f673dd5c04cbe2f14c3acb8c8b95e3384869bb8cc9bff8cb9df/numpy-2.4.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f169b9a863d34f5d11b8698ead99febeaa17a13ca044961aa8e2662a6c7766a0", size = 16684353 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4c/39/8a320264a84404c74cc7e79715de85d6130fa07a0898f67fb5cd5bd79908/numpy-2.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2483e4584a1cb3092da4470b38866634bafb223cbcd551ee047633fd2584599a", size = 14704914 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/91/fb/287076b2614e1d1044235f50f03748f31fa287e3dbe6abeb35cdfa351eca/numpy-2.4.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:2d19e6e2095506d1736b7d80595e0f252d76b89f5e715c35e06e937679ea7d7a", size = 5210005 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/63/eb/fcc338595309910de6ecabfcef2419a9ce24399680bfb149421fa2df1280/numpy-2.4.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6a246d5914aa1c820c9443ddcee9c02bec3e203b0c080349533fae17727dfd1b", size = 6544974 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/44/5d/e7e9044032a716cdfaa3fba27a8e874bf1c5f1912a1ddd4ed071bf8a14a6/numpy-2.4.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:989824e9faf85f96ec9c7761cd8d29c531ad857bfa1daa930cba85baaecf1a9a", size = 15684591 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/98/7c/21252050676612625449b4807d6b695b9ce8a7c9e1c197ee6216c8a65c7c/numpy-2.4.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27a8d92cd10f1382a67d7cf4db7ce18341b66438bdd9f691d7b0e48d104c2a9d", size = 16637700 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/29/56d2bbef9465db24ef25393383d761a1af4f446a1df9b8cded4fe3a5a5d7/numpy-2.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e44319a2953c738205bf3354537979eaa3998ed673395b964c1176083dd46252", size = 17035781 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e3/2b/a35a6d7589d21f44cea7d0a98de5ddcbb3d421b2622a5c96b1edf18707c3/numpy-2.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e892aff75639bbef0d2a2cfd55535510df26ff92f63c92cd84ef8d4ba5a5557f", size = 18362959 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/64/c9/d52ec581f2390e0f5f85cbfd80fb83d965fc15e9f0e1aec2195faa142cde/numpy-2.4.4-cp314-cp314-win32.whl", hash = "sha256:1378871da56ca8943c2ba674530924bb8ca40cd228358a3b5f302ad60cf875fc", size = 6008768 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fa/22/4cc31a62a6c7b74a8730e31a4274c5dc80e005751e277a2ce38e675e4923/numpy-2.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:715d1c092715954784bc79e1174fc2a90093dc4dc84ea15eb14dad8abdcdeb74", size = 12449181 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/70/2e/14cda6f4d8e396c612d1bf97f22958e92148801d7e4f110cabebdc0eef4b/numpy-2.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:2c194dd721e54ecad9ad387c1d35e63dce5c4450c6dc7dd5611283dda239aabb", size = 10496035 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/e8/8fed8c8d848d7ecea092dc3469643f9d10bc3a134a815a3b033da1d2039b/numpy-2.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2aa0613a5177c264ff5921051a5719d20095ea586ca88cc802c5c218d1c67d3e", size = 14824958 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/05/1a/d8007a5138c179c2bf33ef44503e83d70434d2642877ee8fbb230e7c0548/numpy-2.4.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:42c16925aa5a02362f986765f9ebabf20de75cdefdca827d14315c568dcab113", size = 5330020 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/99/64/ffb99ac6ae93faf117bcbd5c7ba48a7f45364a33e8e458545d3633615dda/numpy-2.4.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:874f200b2a981c647340f841730fc3a2b54c9d940566a3c4149099591e2c4c3d", size = 6650758 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6e/6e/795cc078b78a384052e73b2f6281ff7a700e9bf53bcce2ee579d4f6dd879/numpy-2.4.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9b39d38a9bd2ae1becd7eac1303d031c5c110ad31f2b319c6e7d98b135c934d", size = 15729948 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5f/86/2acbda8cc2af5f3d7bfc791192863b9e3e19674da7b5e533fded124d1299/numpy-2.4.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b268594bccac7d7cf5844c7732e3f20c50921d94e36d7ec9b79e9857694b1b2f", size = 16679325 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bc/59/cafd83018f4aa55e0ac6fa92aa066c0a1877b77a615ceff1711c260ffae8/numpy-2.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ac6b31e35612a26483e20750126d30d0941f949426974cace8e6b5c58a3657b0", size = 17084883 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f0/85/a42548db84e65ece46ab2caea3d3f78b416a47af387fcbb47ec28e660dc2/numpy-2.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e3ed142f2728df44263aaf5fb1f5b0b99f4070c553a0d7f033be65338329150", size = 18403474 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ed/ad/483d9e262f4b831000062e5d8a45e342166ec8aaa1195264982bca267e62/numpy-2.4.4-cp314-cp314t-win32.whl", hash = "sha256:dddbbd259598d7240b18c9d87c56a9d2fb3b02fe266f49a7c101532e78c1d871", size = 6155500 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c7/03/2fc4e14c7bd4ff2964b74ba90ecb8552540b6315f201df70f137faa5c589/numpy-2.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:a7164afb23be6e37ad90b2f10426149fd75aee07ca55653d2aa41e66c4ef697e", size = 12637755 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/58/78/548fb8e07b1a341746bfbecb32f2c268470f45fa028aacdbd10d9bc73aab/numpy-2.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:ba203255017337d39f89bdd58417f03c4426f12beed0440cfd933cb15f8669c7", size = 10566643 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cublas"
|
||||||
|
version = "13.1.0.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e1/a5/fce49e2ae977e0ccc084e5adafceb4f0ac0c8333cb6863501618a7277f67/nvidia_cublas-13.1.0.3-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c86fc7f7ae36d7528288c5d88098edcb7b02c633d262e7ddbb86b0ad91be5df2", size = 542851226 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e7/44/423ac00af4dd95a5aeb27207e2c0d9b7118702149bf4704c3ddb55bb7429/nvidia_cublas-13.1.0.3-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:ee8722c1f0145ab246bccb9e452153b5e0515fd094c3678df50b2a0888b8b171", size = 423133236 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cuda-cupti"
|
||||||
|
version = "13.0.85"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2a/2a/80353b103fc20ce05ef51e928daed4b6015db4aaa9162ed0997090fe2250/nvidia_cuda_cupti-13.0.85-py3-none-manylinux_2_25_aarch64.whl", hash = "sha256:796bd679890ee55fb14a94629b698b6db54bcfd833d391d5e94017dd9d7d3151", size = 10310827 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/33/6d/737d164b4837a9bbd202f5ae3078975f0525a55730fe871d8ed4e3b952b0/nvidia_cuda_cupti-13.0.85-py3-none-manylinux_2_25_x86_64.whl", hash = "sha256:4eb01c08e859bf924d222250d2e8f8b8ff6d3db4721288cf35d14252a4d933c8", size = 10715597 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cuda-nvrtc"
|
||||||
|
version = "13.0.88"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c3/68/483a78f5e8f31b08fb1bb671559968c0ca3a065ac7acabfc7cee55214fd6/nvidia_cuda_nvrtc-13.0.88-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:ad9b6d2ead2435f11cbb6868809d2adeeee302e9bb94bcf0539c7a40d80e8575", size = 90215200 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b7/dc/6bb80850e0b7edd6588d560758f17e0550893a1feaf436807d64d2da040f/nvidia_cuda_nvrtc-13.0.88-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d27f20a0ca67a4bb34268a5e951033496c5b74870b868bacd046b1b8e0c3267b", size = 43015449 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cuda-runtime"
|
||||||
|
version = "13.0.96"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/87/4f/17d7b9b8e285199c58ce28e31b5c5bbaa4d8271af06a89b6405258245de2/nvidia_cuda_runtime-13.0.96-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ef9bcbe90493a2b9d810e43d249adb3d02e98dd30200d86607d8d02687c43f55", size = 2261060 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2e/24/d1558f3b68b1d26e706813b1d10aa1d785e4698c425af8db8edc3dced472/nvidia_cuda_runtime-13.0.96-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7f82250d7782aa23b6cfe765ecc7db554bd3c2870c43f3d1821f1d18aebf0548", size = 2243632 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cudnn-cu13"
|
||||||
|
version = "9.19.0.56"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "nvidia-cublas" },
|
||||||
|
]
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f1/84/26025437c1e6b61a707442184fa0c03d083b661adf3a3eecfd6d21677740/nvidia_cudnn_cu13-9.19.0.56-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:6ed29ffaee1176c612daf442e4dd6cfeb6a0caa43ddcbeb59da94953030b1be4", size = 433781201 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a3/22/0b4b932655d17a6da1b92fa92ab12844b053bb2ac2475e179ba6f043da1e/nvidia_cudnn_cu13-9.19.0.56-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:d20e1734305e9d68889a96e3f35094d733ff1f83932ebe462753973e53a572bf", size = 366066321 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cufft"
|
||||||
|
version = "12.0.0.61"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "nvidia-nvjitlink" },
|
||||||
|
]
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8b/ae/f417a75c0259e85c1d2f83ca4e960289a5f814ed0cea74d18c353d3e989d/nvidia_cufft-12.0.0.61-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2708c852ef8cd89d1d2068bdbece0aa188813a0c934db3779b9b1faa8442e5f5", size = 214053554 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a8/2f/7b57e29836ea8714f81e9898409196f47d772d5ddedddf1592eadb8ab743/nvidia_cufft-12.0.0.61-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6c44f692dce8fd5ffd3e3df134b6cdb9c2f72d99cf40b62c32dde45eea9ddad3", size = 214085489 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cufile"
|
||||||
|
version = "1.15.1.6"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3f/70/4f193de89a48b71714e74602ee14d04e4019ad36a5a9f20c425776e72cd6/nvidia_cufile-1.15.1.6-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:08a3ecefae5a01c7f5117351c64f17c7c62efa5fffdbe24fc7d298da19cd0b44", size = 1223672 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ab/73/cc4a14c9813a8a0d509417cf5f4bdaba76e924d58beb9864f5a7baceefbf/nvidia_cufile-1.15.1.6-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:bdc0deedc61f548bddf7733bdc216456c2fdb101d020e1ab4b88d232d5e2f6d1", size = 1136992 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-curand"
|
||||||
|
version = "10.4.0.35"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1e/72/7c2ae24fb6b63a32e6ae5d241cc65263ea18d08802aaae087d9f013335a2/nvidia_curand-10.4.0.35-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:133df5a7509c3e292aaa2b477afd0194f06ce4ea24d714d616ff36439cee349a", size = 61962106 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a5/9f/be0a41ca4a4917abf5cb9ae0daff1a6060cc5de950aec0396de9f3b52bc5/nvidia_curand-10.4.0.35-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:1aee33a5da6e1db083fe2b90082def8915f30f3248d5896bcec36a579d941bfc", size = 59544258 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cusolver"
|
||||||
|
version = "12.0.4.66"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "nvidia-cublas" },
|
||||||
|
{ name = "nvidia-cusparse" },
|
||||||
|
{ name = "nvidia-nvjitlink" },
|
||||||
|
]
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c8/c3/b30c9e935fc01e3da443ec0116ed1b2a009bb867f5324d3f2d7e533e776b/nvidia_cusolver-12.0.4.66-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:02c2457eaa9e39de20f880f4bd8820e6a1cfb9f9a34f820eb12a155aa5bc92d2", size = 223467760 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5f/67/cba3777620cdacb99102da4042883709c41c709f4b6323c10781a9c3aa34/nvidia_cusolver-12.0.4.66-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:0a759da5dea5c0ea10fd307de75cdeb59e7ea4fcb8add0924859b944babf1112", size = 200941980 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cusparse"
|
||||||
|
version = "12.6.3.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "nvidia-nvjitlink" },
|
||||||
|
]
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f8/94/5c26f33738ae35276672f12615a64bd008ed5be6d1ebcb23579285d960a9/nvidia_cusparse-12.6.3.3-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:80bcc4662f23f1054ee334a15c72b8940402975e0eab63178fc7e670aa59472c", size = 162155568 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fa/18/623c77619c31d62efd55302939756966f3ecc8d724a14dab2b75f1508850/nvidia_cusparse-12.6.3.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b3c89c88d01ee0e477cb7f82ef60a11a4bcd57b6b87c33f789350b59759360b", size = 145942937 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-cusparselt-cu13"
|
||||||
|
version = "0.8.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/46/10/8dcd1175260706a2fc92a16a52e306b71d4c1ea0b0cc4a9484183399818a/nvidia_cusparselt_cu13-0.8.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:400c6ed1cf6780fc6efedd64ec9f1345871767e6a1a0a552a1ea0578117ea77c", size = 220791277 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fd/53/43b0d71f4e702fa9733f8b4571fdca50a8813f1e450b656c239beff12315/nvidia_cusparselt_cu13-0.8.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:25e30a8a7323935d4ad0340b95a0b69926eee755767e8e0b1cf8dd85b197d3fd", size = 169884119 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-nccl-cu13"
|
||||||
|
version = "2.28.9"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/39/55/1920646a2e43ffd4fc958536b276197ed740e9e0c54105b4bb3521591fc7/nvidia_nccl_cu13-2.28.9-py3-none-manylinux_2_18_aarch64.whl", hash = "sha256:01c873ba1626b54caa12272ed228dc5b2781545e0ae8ba3f432a8ef1c6d78643", size = 196561677 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b0/b4/878fefaad5b2bcc6fcf8d474a25e3e3774bc5133e4b58adff4d0bca238bc/nvidia_nccl_cu13-2.28.9-py3-none-manylinux_2_18_x86_64.whl", hash = "sha256:e4553a30f34195f3fa1da02a6da3d6337d28f2003943aa0a3d247bbc25fefc42", size = 196493177 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-nvjitlink"
|
||||||
|
version = "13.0.88"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/56/7a/123e033aaff487c77107195fa5a2b8686795ca537935a24efae476c41f05/nvidia_nvjitlink-13.0.88-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:13a74f429e23b921c1109976abefacc69835f2f433ebd323d3946e11d804e47b", size = 40713933 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ab/2c/93c5250e64df4f894f1cbb397c6fd71f79813f9fd79d7cd61de3f97b3c2d/nvidia_nvjitlink-13.0.88-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e931536ccc7d467a98ba1d8b89ff7fa7f1fa3b13f2b0069118cd7f47bff07d0c", size = 38768748 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-nvshmem-cu13"
|
||||||
|
version = "3.4.5"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/dc/0f/05cc9c720236dcd2db9c1ab97fff629e96821be2e63103569da0c9b72f19/nvidia_nvshmem_cu13-3.4.5-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dc2a197f38e5d0376ad52cd1a2a3617d3cdc150fd5966f4aee9bcebb1d68fe9", size = 60215947 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/35/a9bf80a609e74e3b000fef598933235c908fcefcef9026042b8e6dfde2a9/nvidia_nvshmem_cu13-3.4.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:290f0a2ee94c9f3687a02502f3b9299a9f9fe826e6d0287ee18482e78d495b80", size = 60412546 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nvidia-nvtx"
|
||||||
|
version = "13.0.85"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c2/f3/d86c845465a2723ad7e1e5c36dcd75ddb82898b3f53be47ebd429fb2fa5d/nvidia_nvtx-13.0.85-py3-none-manylinux1_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4936d1d6780fbe68db454f5e72a42ff64d1fd6397df9f363ae786930fd5c1cd4", size = 148047 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a8/64/3708a90d1ebe202ffdeb7185f878a3c84d15c2b2c31858da2ce0583e2def/nvidia_nvtx-13.0.85-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb7780edb6b14107373c835bf8b72e7a178bac7367e23da7acb108f973f157a6", size = 148878 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "omegaconf"
|
||||||
|
version = "2.3.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "antlr4-python3-runtime" },
|
||||||
|
{ name = "pyyaml" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/09/48/6388f1bb9da707110532cb70ec4d2822858ddfb44f1cdf1233c20a80ea4b/omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7", size = 3298120 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b", size = 79500 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pillow"
|
||||||
|
version = "12.1.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/07/26/246ab11455b2549b9233dbd44d358d033a2f780fa9007b61a913c5b2d24e/pillow-12.1.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aee2810642b2898bb187ced9b349e95d2a7272930796e022efaf12e99dccd293", size = 8045012 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b2/8b/07587069c27be7535ac1fe33874e32de118fbd34e2a73b7f83436a88368c/pillow-12.1.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a0b1cd6232e2b618adcc54d9882e4e662a089d5768cd188f7c245b4c8c44a397", size = 6349638 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ff/79/6df7b2ee763d619cda2fb4fea498e5f79d984dae304d45a8999b80d6cf5c/pillow-12.1.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7aac39bcf8d4770d089588a2e1dd111cbaa42df5a94be3114222057d68336bd0", size = 7041540 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2c/5e/2ba19e7e7236d7529f4d873bdaf317a318896bac289abebd4bb00ef247f0/pillow-12.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ab174cd7d29a62dd139c44bf74b698039328f45cb03b4596c43473a46656b2f3", size = 6462613 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/03/03/31216ec124bb5c3dacd74ce8efff4cc7f52643653bad4825f8f08c697743/pillow-12.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:339ffdcb7cbeaa08221cd401d517d4b1fe7a9ed5d400e4a8039719238620ca35", size = 7166745 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1f/e7/7c4552d80052337eb28653b617eafdef39adfb137c49dd7e831b8dc13bc5/pillow-12.1.1-cp312-cp312-win32.whl", hash = "sha256:5d1f9575a12bed9e9eedd9a4972834b08c97a352bd17955ccdebfeca5913fa0a", size = 6328823 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3d/17/688626d192d7261bbbf98846fc98995726bddc2c945344b65bec3a29d731/pillow-12.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:21329ec8c96c6e979cd0dfd29406c40c1d52521a90544463057d2aaa937d66a6", size = 7033367 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ed/fe/a0ef1f73f939b0eca03ee2c108d0043a87468664770612602c63266a43c4/pillow-12.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:af9a332e572978f0218686636610555ae3defd1633597be015ed50289a03c523", size = 2453811 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d5/11/6db24d4bd7685583caeae54b7009584e38da3c3d4488ed4cd25b439de486/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d242e8ac078781f1de88bf823d70c1a9b3c7950a44cdf4b7c012e22ccbcd8e4e", size = 4062689 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/33/c0/ce6d3b1fe190f0021203e0d9b5b99e57843e345f15f9ef22fcd43842fd21/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:02f84dfad02693676692746df05b89cf25597560db2857363a208e393429f5e9", size = 4138535 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a0/c6/d5eb6a4fb32a3f9c21a8c7613ec706534ea1cf9f4b3663e99f0d83f6fca8/pillow-12.1.1-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e65498daf4b583091ccbb2556c7000abf0f3349fcd57ef7adc9a84a394ed29f6", size = 3601364 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/14/a1/16c4b823838ba4c9c52c0e6bbda903a3fe5a1bdbf1b8eb4fff7156f3e318/pillow-12.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c6db3b84c87d48d0088943bf33440e0c42370b99b1c2a7989216f7b42eede60", size = 5262561 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bb/ad/ad9dc98ff24f485008aa5cdedaf1a219876f6f6c42a4626c08bc4e80b120/pillow-12.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b7e5304e34942bf62e15184219a7b5ad4ff7f3bb5cca4d984f37df1a0e1aee2", size = 4657460 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9e/1b/f1a4ea9a895b5732152789326202a82464d5254759fbacae4deea3069334/pillow-12.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5bddd742a44b7e6b1e773ab5db102bd7a94c32555ba656e76d319d19c3850", size = 6232698 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/95/f4/86f51b8745070daf21fd2e5b1fe0eb35d4db9ca26e6d58366562fb56a743/pillow-12.1.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc44ef1f3de4f45b50ccf9136999d71abb99dca7706bc75d222ed350b9fd2289", size = 8041706 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/29/9b/d6ecd956bb1266dd1045e995cce9b8d77759e740953a1c9aad9502a0461e/pillow-12.1.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a8eb7ed8d4198bccbd07058416eeec51686b498e784eda166395a23eb99138e", size = 6346621 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/71/24/538bff45bde96535d7d998c6fed1a751c75ac7c53c37c90dc2601b243893/pillow-12.1.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47b94983da0c642de92ced1702c5b6c292a84bd3a8e1d1702ff923f183594717", size = 7038069 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/94/0e/58cb1a6bc48f746bc4cb3adb8cabff73e2742c92b3bf7a220b7cf69b9177/pillow-12.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:518a48c2aab7ce596d3bf79d0e275661b846e86e4d0e7dec34712c30fe07f02a", size = 6460040 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6c/57/9045cb3ff11eeb6c1adce3b2d60d7d299d7b273a2e6c8381a524abfdc474/pillow-12.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a550ae29b95c6dc13cf69e2c9dc5747f814c54eeb2e32d683e5e93af56caa029", size = 7164523 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/73/f2/9be9cb99f2175f0d4dbadd6616ce1bf068ee54a28277ea1bf1fbf729c250/pillow-12.1.1-cp313-cp313-win32.whl", hash = "sha256:a003d7422449f6d1e3a34e3dd4110c22148336918ddbfc6a32581cd54b2e0b2b", size = 6332552 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3f/eb/b0834ad8b583d7d9d42b80becff092082a1c3c156bb582590fcc973f1c7c/pillow-12.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:344cf1e3dab3be4b1fa08e449323d98a2a3f819ad20f4b22e77a0ede31f0faa1", size = 7040108 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d5/7d/fc09634e2aabdd0feabaff4a32f4a7d97789223e7c2042fd805ea4b4d2c2/pillow-12.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:5c0dd1636633e7e6a0afe7bf6a51a14992b7f8e60de5789018ebbdfae55b040a", size = 2453712 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/19/2a/b9d62794fc8a0dd14c1943df68347badbd5511103e0d04c035ffe5cf2255/pillow-12.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0330d233c1a0ead844fc097a7d16c0abff4c12e856c0b325f231820fee1f39da", size = 5264880 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/26/9d/e03d857d1347fa5ed9247e123fcd2a97b6220e15e9cb73ca0a8d91702c6e/pillow-12.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dae5f21afb91322f2ff791895ddd8889e5e947ff59f71b46041c8ce6db790bc", size = 4660616 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f7/ec/8a6d22afd02570d30954e043f09c32772bfe143ba9285e2fdb11284952cd/pillow-12.1.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e0c664be47252947d870ac0d327fea7e63985a08794758aa8af5b6cb6ec0c9c", size = 6269008 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3d/1d/6d875422c9f28a4a361f495a5f68d9de4a66941dc2c619103ca335fa6446/pillow-12.1.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:691ab2ac363b8217f7d31b3497108fb1f50faab2f75dfb03284ec2f217e87bf8", size = 8073226 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a1/cd/134b0b6ee5eda6dc09e25e24b40fdafe11a520bc725c1d0bbaa5e00bf95b/pillow-12.1.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9e8064fb1cc019296958595f6db671fba95209e3ceb0c4734c9baf97de04b20", size = 6380136 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7a/a9/7628f013f18f001c1b98d8fffe3452f306a70dc6aba7d931019e0492f45e/pillow-12.1.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:472a8d7ded663e6162dafdf20015c486a7009483ca671cece7a9279b512fcb13", size = 7067129 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1e/f8/66ab30a2193b277785601e82ee2d49f68ea575d9637e5e234faaa98efa4c/pillow-12.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:89b54027a766529136a06cfebeecb3a04900397a3590fd252160b888479517bf", size = 6491807 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/da/0b/a877a6627dc8318fdb84e357c5e1a758c0941ab1ddffdafd231983788579/pillow-12.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:86172b0831b82ce4f7877f280055892b31179e1576aa00d0df3bb1bbf8c3e524", size = 7190954 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/83/43/6f732ff85743cf746b1361b91665d9f5155e1483817f693f8d57ea93147f/pillow-12.1.1-cp313-cp313t-win32.whl", hash = "sha256:44ce27545b6efcf0fdbdceb31c9a5bdea9333e664cda58a7e674bb74608b3986", size = 6336441 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3b/44/e865ef3986611bb75bfabdf94a590016ea327833f434558801122979cd0e/pillow-12.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a285e3eb7a5a45a2ff504e31f4a8d1b12ef62e84e5411c6804a42197c1cf586c", size = 7045383 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a8/c6/f4fb24268d0c6908b9f04143697ea18b0379490cb74ba9e8d41b898bd005/pillow-12.1.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cc7d296b5ea4d29e6570dabeaed58d31c3fea35a633a69679fb03d7664f43fb3", size = 2456104 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pyyaml"
|
||||||
|
version = "6.0.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "setuptools"
|
||||||
|
version = "81.0.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/0d/1c/73e719955c59b8e424d015ab450f51c0af856ae46ea2da83eba51cc88de1/setuptools-81.0.0.tar.gz", hash = "sha256:487b53915f52501f0a79ccfd0c02c165ffe06631443a886740b91af4b7a5845a", size = 1198299 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e1/e3/c164c88b2e5ce7b24d667b9bd83589cf4f3520d97cad01534cd3c4f55fdb/setuptools-81.0.0-py3-none-any.whl", hash = "sha256:fdd925d5c5d9f62e4b74b30d6dd7828ce236fd6ed998a08d81de62ce5a6310d6", size = 1062021 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sympy"
|
||||||
|
version = "1.14.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "mpmath" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "torch"
|
||||||
|
version = "2.11.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "cuda-bindings", marker = "sys_platform == 'linux'" },
|
||||||
|
{ name = "cuda-toolkit", extra = ["cublas", "cudart", "cufft", "cufile", "cupti", "curand", "cusolver", "cusparse", "nvjitlink", "nvrtc", "nvtx"], marker = "sys_platform == 'linux'" },
|
||||||
|
{ name = "filelock" },
|
||||||
|
{ name = "fsspec" },
|
||||||
|
{ name = "jinja2" },
|
||||||
|
{ name = "networkx" },
|
||||||
|
{ name = "nvidia-cudnn-cu13", marker = "sys_platform == 'linux'" },
|
||||||
|
{ name = "nvidia-cusparselt-cu13", marker = "sys_platform == 'linux'" },
|
||||||
|
{ name = "nvidia-nccl-cu13", marker = "sys_platform == 'linux'" },
|
||||||
|
{ name = "nvidia-nvshmem-cu13", marker = "sys_platform == 'linux'" },
|
||||||
|
{ name = "setuptools" },
|
||||||
|
{ name = "sympy" },
|
||||||
|
{ name = "triton", marker = "sys_platform == 'linux'" },
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
]
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6f/8b/69e3008d78e5cee2b30183340cc425081b78afc5eff3d080daab0adda9aa/torch-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b5866312ee6e52ea625cd211dcb97d6a2cdc1131a5f15cc0d87eec948f6dd34", size = 80606338 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/13/16/42e5915ebe4868caa6bac83a8ed59db57f12e9a61b7d749d584776ed53d5/torch-2.11.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f99924682ef0aa6a4ab3b1b76f40dc6e273fca09f367d15a524266db100a723f", size = 419731115 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1a/c9/82638ef24d7877510f83baf821f5619a61b45568ce21c0a87a91576510aa/torch-2.11.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:0f68f4ac6d95d12e896c3b7a912b5871619542ec54d3649cf48cc1edd4dd2756", size = 530712279 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1c/ff/6756f1c7ee302f6d202120e0f4f05b432b839908f9071157302cedfc5232/torch-2.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:fbf39280699d1b869f55eac536deceaa1b60bd6788ba74f399cc67e60a5fab10", size = 114556047 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/87/89/5ea6722763acee56b045435fb84258db7375c48165ec8be7880ab2b281c5/torch-2.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6debd97ccd3205bbb37eb806a9d8219e1139d15419982c09e23ef7d4369d18", size = 80606801 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/32/d1/8ed2173589cbfe744ed54e5a73efc107c0085ba5777ee93a5f4c1ab90553/torch-2.11.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:63a68fa59de8f87acc7e85a5478bb2dddbb3392b7593ec3e78827c793c4b73fd", size = 419732382 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3d/e1/b73f7c575a4b8f87a5928f50a1e35416b5e27295d8be9397d5293e7e8d4c/torch-2.11.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:cc89b9b173d9adfab59fd227f0ab5e5516d9a52b658ae41d64e59d2e55a418db", size = 530711509 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/66/82/3e3fcdd388fbe54e29fd3f991f36846ff4ac90b0d0181e9c8f7236565f82/torch-2.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:4dda3b3f52d121063a731ddb835f010dc137b920d7fec2778e52f60d8e4bf0cd", size = 114555842 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/db/38/8ac78069621b8c2b4979c2f96dc8409ef5e9c4189f6aac629189a78677ca/torch-2.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8b394322f49af4362d4f80e424bcaca7efcd049619af03a4cf4501520bdf0fb4", size = 80959574 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6d/6c/56bfb37073e7136e6dd86bfc6af7339946dd684e0ecf2155ac0eee687ae1/torch-2.11.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:2658f34ce7e2dabf4ec73b45e2ca68aedad7a5be87ea756ad656eaf32bf1e1ea", size = 419732324 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/07/f4/1b666b6d61d3394cca306ea543ed03a64aad0a201b6cd159f1d41010aeb1/torch-2.11.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:98bb213c3084cfe176302949bdc360074b18a9da7ab59ef2edc9d9f742504778", size = 530596026 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/48/6b/30d1459fa7e4b67e9e3fe1685ca1d8bb4ce7c62ef436c3a615963c6c866c/torch-2.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a97b94bbf62992949b4730c6cd2cc9aee7b335921ee8dc207d930f2ed09ae2db", size = 114793702 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/26/0d/8603382f61abd0db35841148ddc1ffd607bf3100b11c6e1dab6d2fc44e72/torch-2.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:01018087326984a33b64e04c8cb5c2795f9120e0d775ada1f6638840227b04d7", size = 80573442 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c7/86/7cd7c66cb9cec6be330fff36db5bd0eef386d80c031b581ec81be1d4b26c/torch-2.11.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:2bb3cc54bd0dea126b0060bb1ec9de0f9c7f7342d93d436646516b0330cd5be7", size = 419749385 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/47/e8/b98ca2d39b2e0e4730c0ee52537e488e7008025bc77ca89552ff91021f7c/torch-2.11.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:4dc8b3809469b6c30b411bb8c4cad3828efd26236153d9beb6a3ec500f211a60", size = 530716756 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/78/88/d4a4cda8362f8a30d1ed428564878c3cafb0d87971fbd3947d4c84552095/torch-2.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:2b4e811728bd0cc58fb2b0948fe939a1ee2bf1422f6025be2fca4c7bd9d79718", size = 114552300 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bf/46/4419098ed6d801750f26567b478fc185c3432e11e2cad712bc6b4c2ab0d0/torch-2.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8245477871c3700d4370352ffec94b103cfcb737229445cf9946cddb7b2ca7cd", size = 80959460 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fd/66/54a56a4a6ceaffb567231994a9745821d3af922a854ed33b0b3a278e0a99/torch-2.11.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:ab9a8482f475f9ba20e12db84b0e55e2f58784bdca43a854a6ccd3fd4b9f75e6", size = 419735835 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/e7/0b6665f533aa9e337662dc190425abc0af1fe3234088f4454c52393ded61/torch-2.11.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:563ed3d25542d7e7bbc5b235ccfacfeb97fb470c7fee257eae599adb8005c8a2", size = 530613405 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cf/bf/c8d12a2c86dbfd7f40fb2f56fbf5a505ccf2d9ce131eb559dfc7c51e1a04/torch-2.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b2a43985ff5ef6ddd923bbcf99943e5f58059805787c5c9a2622bf05ca2965b0", size = 114792991 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "triton"
|
||||||
|
version = "3.6.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/17/5d/08201db32823bdf77a0e2b9039540080b2e5c23a20706ddba942924ebcd6/triton-3.6.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:374f52c11a711fd062b4bfbb201fd9ac0a5febd28a96fb41b4a0f51dde3157f4", size = 176128243 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ab/a8/cdf8b3e4c98132f965f88c2313a4b493266832ad47fb52f23d14d4f86bb5/triton-3.6.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74caf5e34b66d9f3a429af689c1c7128daba1d8208df60e81106b115c00d6fca", size = 188266850 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/12/34d71b350e89a204c2c7777a9bba0dcf2f19a5bfdd70b57c4dbc5ffd7154/triton-3.6.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448e02fe6dc898e9e5aa89cf0ee5c371e99df5aa5e8ad976a80b93334f3494fd", size = 176133521 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f9/0b/37d991d8c130ce81a8728ae3c25b6e60935838e9be1b58791f5997b24a54/triton-3.6.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c7f76c6e72d2ef08df639e3d0d30729112f47a56b0c81672edc05ee5116ac9", size = 188289450 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ce/4e/41b0c8033b503fd3cfcd12392cdd256945026a91ff02452bef40ec34bee7/triton-3.6.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1722e172d34e32abc3eb7711d0025bb69d7959ebea84e3b7f7a341cd7ed694d6", size = 176276087 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/35/f8/9c66bfc55361ec6d0e4040a0337fb5924ceb23de4648b8a81ae9d33b2b38/triton-3.6.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d002e07d7180fd65e622134fbd980c9a3d4211fb85224b56a0a0efbd422ab72f", size = 188400296 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/49/55/5ecf0dcaa0f2fbbd4420f7ef227ee3cb172e91e5fede9d0ecaddc43363b4/triton-3.6.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef5523241e7d1abca00f1d240949eebdd7c673b005edbbce0aca95b8191f1d43", size = 176138577 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/df/3d/9e7eee57b37c80cec63322c0231bb6da3cfe535a91d7a4d64896fcb89357/triton-3.6.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a17a5d5985f0ac494ed8a8e54568f092f7057ef60e1b0fa09d3fd1512064e803", size = 188273063 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/48/db/56ee649cab5eaff4757541325aca81f52d02d4a7cd3506776cad2451e060/triton-3.6.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b3a97e8ed304dfa9bd23bb41ca04cdf6b2e617d5e782a8653d616037a5d537d", size = 176274804 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f6/56/6113c23ff46c00aae423333eb58b3e60bdfe9179d542781955a5e1514cb3/triton-3.6.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:46bd1c1af4b6704e554cad2eeb3b0a6513a980d470ccfa63189737340c7746a7", size = 188397994 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "typing-extensions"
|
||||||
|
version = "4.15.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 },
|
||||||
|
]
|
||||||
Reference in New Issue
Block a user