Upload to Main

This commit is contained in:
张菲
2025-10-07 22:42:55 +08:00
commit d3ddab7c5d
218 changed files with 125815 additions and 0 deletions

View File

@@ -0,0 +1,8 @@
from .cifar10 import *
from .cifar100 import *
from .fashionmnist import *
from .imagenet import *
from .mnist import *
from .qmnist import *
from .svhn import *
from .tinyimagenet import *

View File

@@ -0,0 +1,19 @@
from torchvision import datasets, transforms
from torch import tensor, long
def CIFAR10(data_path):
    """Load the CIFAR-10 train/test splits with per-channel normalization.

    Args:
        data_path: directory where torchvision stores/finds the dataset.

    Returns:
        (channel, im_size, num_classes, class_names, mean, std,
        dst_train, dst_test); targets are converted to long tensors.
    """
    channel = 3
    im_size = (32, 32)
    num_classes = 10
    mean = [0.4914, 0.4822, 0.4465]
    std = [0.2470, 0.2435, 0.2616]
    transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)])
    # download=True for consistency with the other dataset loaders in this
    # package; it is a no-op when the data is already present, and avoids a
    # RuntimeError on a fresh machine.
    dst_train = datasets.CIFAR10(data_path, train=True, download=True, transform=transform)
    dst_test = datasets.CIFAR10(data_path, train=False, download=True, transform=transform)
    class_names = dst_train.classes
    # torchvision keeps CIFAR targets as a Python list; convert once up front.
    dst_train.targets = tensor(dst_train.targets, dtype=long)
    dst_test.targets = tensor(dst_test.targets, dtype=long)
    return channel, im_size, num_classes, class_names, mean, std, dst_train, dst_test

View File

@@ -0,0 +1,17 @@
from torchvision import datasets, transforms
from torch import tensor, long
def CIFAR100(data_path):
    """Load the CIFAR-100 train/test splits with per-channel normalization.

    Returns (channel, im_size, num_classes, class_names, mean, std,
    dst_train, dst_test); targets are converted to long tensors.
    """
    channel, im_size, num_classes = 3, (32, 32), 100
    mean = [0.5071, 0.4865, 0.4409]
    std = [0.2673, 0.2564, 0.2762]
    preprocess = transforms.Compose(
        [transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)])
    dst_train = datasets.CIFAR100(data_path, train=True, download=True, transform=preprocess)
    dst_test = datasets.CIFAR100(data_path, train=False, download=True, transform=preprocess)
    class_names = dst_train.classes
    # torchvision keeps CIFAR targets as a Python list; convert both splits once.
    for split in (dst_train, dst_test):
        split.targets = tensor(split.targets, dtype=long)
    return channel, im_size, num_classes, class_names, mean, std, dst_train, dst_test

View File

@@ -0,0 +1,14 @@
from torchvision import datasets, transforms
def FashionMNIST(data_path):
    """Load the Fashion-MNIST train/test splits with mean/std normalization.

    Returns (channel, im_size, num_classes, class_names, mean, std,
    dst_train, dst_test).
    """
    channel, im_size, num_classes = 1, (28, 28), 10
    mean, std = [0.2861], [0.3530]
    preprocess = transforms.Compose(
        [transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)])
    dst_train = datasets.FashionMNIST(data_path, train=True, download=True, transform=preprocess)
    dst_test = datasets.FashionMNIST(data_path, train=False, download=True, transform=preprocess)
    return channel, im_size, num_classes, dst_train.classes, mean, std, dst_train, dst_test

View File

@@ -0,0 +1,27 @@
from torchvision import datasets, transforms
from torch import tensor, long
def ImageNet(data_path):
    """Load the ImageNet train/val splits with the standard 224x224 eval pipeline.

    Both splits use Resize(256) -> CenterCrop(224) -> ToTensor -> Normalize.
    Returns (channel, im_size, num_classes, class_names, mean, std,
    dst_train, dst_test); targets are converted to long tensors.
    """
    channel, im_size, num_classes = 3, (224, 224), 1000
    mean = [0.485, 0.456, 0.406]
    std = [0.229, 0.224, 0.225]
    # The same deterministic pipeline is applied to both splits, so build it once.
    preprocess = transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize(mean, std),
    ])
    dst_train = datasets.ImageNet(data_path, split="train", transform=preprocess)
    dst_test = datasets.ImageNet(data_path, split="val", transform=preprocess)
    class_names = dst_train.classes
    for split in (dst_train, dst_test):
        split.targets = tensor(split.targets, dtype=long)
    return channel, im_size, num_classes, class_names, mean, std, dst_train, dst_test

View File

@@ -0,0 +1,25 @@
from torchvision import datasets, transforms
import numpy as np
def MNIST(data_path, permuted=False, permutation_seed=None):
    """Load the MNIST train/test splits, optionally with a fixed pixel permutation.

    Args:
        data_path: directory where torchvision stores/finds the dataset.
        permuted: when True, apply a fixed random permutation of the 784 pixels
            after normalization (the "permuted MNIST" benchmark).
        permutation_seed: seed for the permutation; None draws a fresh one.

    Returns:
        (channel, im_size, num_classes, class_names, mean, std,
        dst_train, dst_test).
    """
    channel = 1
    im_size = (28, 28)
    num_classes = 10
    mean = [0.1307]
    std = [0.3081]
    transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)])
    if permuted:
        # Use a dedicated RandomState instead of np.random.seed so the global
        # NumPy RNG is not silently reseeded for the rest of the program; the
        # legacy MT19937 seeding yields the identical permutation.
        rng = np.random.RandomState(permutation_seed)
        pixel_permutation = rng.permutation(28 * 28)
        transform = transforms.Compose(
            [transform, transforms.Lambda(lambda x: x.view(-1, 1)[pixel_permutation].view(1, 28, 28))])
    dst_train = datasets.MNIST(data_path, train=True, download=True, transform=transform)
    dst_test = datasets.MNIST(data_path, train=False, download=True, transform=transform)
    class_names = [str(c) for c in range(num_classes)]
    return channel, im_size, num_classes, class_names, mean, std, dst_train, dst_test
def permutedMNIST(data_path, permutation_seed=None):
    """Convenience wrapper: MNIST with a fixed pixel permutation applied."""
    return MNIST(data_path, permuted=True, permutation_seed=permutation_seed)

View File

@@ -0,0 +1,18 @@
from torchvision import datasets, transforms
def QMNIST(data_path):
    """Load the QMNIST train/test splits with mean/std normalization.

    Returns (channel, im_size, num_classes, class_names, mean, std,
    dst_train, dst_test).
    """
    channel, im_size, num_classes = 1, (28, 28), 10
    mean, std = [0.1308], [0.3088]
    preprocess = transforms.Compose(
        [transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)])
    dst_train = datasets.QMNIST(data_path, train=True, download=True, transform=preprocess)
    dst_test = datasets.QMNIST(data_path, train=False, download=True, transform=preprocess)
    class_names = [str(digit) for digit in range(num_classes)]
    for split in (dst_train, dst_test):
        # QMNIST targets carry extra metadata columns; keep only the class label,
        # and disable compat mode so __getitem__ returns the full target tensor.
        split.targets = split.targets[:, 0]
        split.compat = False
    return channel, im_size, num_classes, class_names, mean, std, dst_train, dst_test

19
deepcore/datasets/svhn.py Normal file
View File

@@ -0,0 +1,19 @@
from torchvision import datasets, transforms
from torch import tensor, long
def SVHN(data_path):
    """Load the SVHN train/test splits with per-channel normalization.

    Returns (channel, im_size, num_classes, class_names, mean, std,
    dst_train, dst_test); digit labels are exposed as long tensors under
    `.targets` to match the other dataset loaders in this package.
    """
    channel, im_size, num_classes = 3, (32, 32), 10
    mean = [0.4377, 0.4438, 0.4728]
    std = [0.1980, 0.2010, 0.1970]
    preprocess = transforms.Compose(
        [transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)])
    dst_train = datasets.SVHN(data_path, split='train', download=True, transform=preprocess)
    dst_test = datasets.SVHN(data_path, split='test', download=True, transform=preprocess)
    class_names = [str(digit) for digit in range(num_classes)]
    for split in (dst_train, dst_test):
        # torchvision's SVHN uses `.labels` (ndarray) and has no `.classes`;
        # add both so SVHN is interchangeable with the CIFAR-style datasets.
        split.classes = list(class_names)
        split.targets = tensor(split.labels, dtype=long)
    return channel, im_size, num_classes, class_names, mean, std, dst_train, dst_test

View File

@@ -0,0 +1,35 @@
from torchvision import datasets, transforms
import os
import requests
import zipfile
def TinyImageNet(data_path, downsize=True):
    """Load Tiny-ImageNet-200, downloading and extracting it on first use.

    Args:
        data_path: directory that holds (or will hold) "tiny-imagenet-200".
        downsize: when True, resize images from 64x64 down to 32x32.

    Returns:
        (channel, im_size, num_classes, class_names, mean, std,
        dst_train, dst_test).
    """
    if not os.path.exists(os.path.join(data_path, "tiny-imagenet-200")):
        url = "http://cs231n.stanford.edu/tiny-imagenet-200.zip"  # 248MB
        print("Downloading Tiny-ImageNet")
        # Stream to disk in large chunks; fail fast on HTTP errors instead of
        # silently writing an error page into the zip, and close the response.
        with requests.get(url, stream=True, timeout=60) as r:
            r.raise_for_status()
            with open(os.path.join(data_path, "tiny-imagenet-200.zip"), "wb") as f:
                for chunk in r.iter_content(chunk_size=1 << 20):
                    if chunk:
                        f.write(chunk)
        print("Unzipping Tiny-ImageNet")
        with zipfile.ZipFile(os.path.join(data_path, "tiny-imagenet-200.zip")) as zf:
            zf.extractall(path=data_path)
    channel = 3
    im_size = (32, 32) if downsize else (64, 64)
    num_classes = 200
    mean = (0.4802, 0.4481, 0.3975)
    std = (0.2770, 0.2691, 0.2821)
    transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize(mean=mean, std=std)])
    if downsize:
        transform = transforms.Compose([transforms.Resize(32), transform])
    dst_train = datasets.ImageFolder(root=os.path.join(data_path, 'tiny-imagenet-200/train'), transform=transform)
    # NOTE(review): the official Tiny-ImageNet "test" split ships without labels;
    # the labeled held-out split is "val" — confirm which split evaluation expects.
    dst_test = datasets.ImageFolder(root=os.path.join(data_path, 'tiny-imagenet-200/test'), transform=transform)
    class_names = dst_train.classes
    return channel, im_size, num_classes, class_names, mean, std, dst_train, dst_test