
Commit 9930a58

update 2.0 public api in dataset&framework (#31985)

1 parent f1bc322
19 files changed, +38 -98 lines


python/paddle/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -265,6 +265,7 @@
 
 from .framework import set_default_dtype #DEFINE_ALIAS
 from .framework import get_default_dtype #DEFINE_ALIAS
+from .framework import set_grad_enabled #DEFINE_ALIAS
 
 from .tensor.search import index_sample #DEFINE_ALIAS
 from .tensor.stat import mean #DEFINE_ALIAS
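With this hunk, set_grad_enabled becomes importable directly from the top-level paddle namespace. A minimal usage sketch, assuming set_grad_enabled acts as a context manager that toggles gradient tracking in dynamic-graph mode (similar to torch.set_grad_enabled); the tensor values are illustrative:

import paddle

x = paddle.to_tensor([1.0, 2.0, 3.0], stop_gradient=False)

# Assumption: tensors produced while grad is disabled do not track gradients.
with paddle.set_grad_enabled(False):
    y = x * 2
print(y.stop_gradient)  # expected: True while gradient tracking is off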

python/paddle/dataset/__init__.py

Lines changed: 12 additions & 12 deletions

@@ -15,18 +15,18 @@
 Dataset package.
 """
 
-import paddle.dataset.mnist
-import paddle.dataset.imikolov
-import paddle.dataset.imdb
-import paddle.dataset.cifar
-import paddle.dataset.movielens
-import paddle.dataset.conll05
-import paddle.dataset.uci_housing
-import paddle.dataset.wmt14
-import paddle.dataset.wmt16
-import paddle.dataset.flowers
-import paddle.dataset.voc2012
-import paddle.dataset.image
+import paddle.dataset.mnist  # noqa: F401
+import paddle.dataset.imikolov  # noqa: F401
+import paddle.dataset.imdb  # noqa: F401
+import paddle.dataset.cifar  # noqa: F401
+import paddle.dataset.movielens  # noqa: F401
+import paddle.dataset.conll05  # noqa: F401
+import paddle.dataset.uci_housing  # noqa: F401
+import paddle.dataset.wmt14  # noqa: F401
+import paddle.dataset.wmt16  # noqa: F401
+import paddle.dataset.flowers  # noqa: F401
+import paddle.dataset.voc2012  # noqa: F401
+import paddle.dataset.image  # noqa: F401
 
 # set __all__ as empty for not showing APIs under paddle.dataset
 __all__ = []
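The # noqa: F401 markers tell flake8 not to flag these imports as unused; they exist only so the dataset submodules are loaded when the package is imported, while the empty __all__ keeps them out of the package's star-import surface. A short illustration of that standard Python behavior (not code from this commit):

import paddle.dataset                  # loads the submodules registered above
from paddle.dataset import *           # binds nothing, since paddle.dataset.__all__ == []
import paddle.dataset.mnist as mnist   # explicit imports keep working as before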

python/paddle/dataset/cifar.py

Lines changed: 0 additions & 2 deletions

@@ -37,8 +37,6 @@
 import six
 from six.moves import cPickle as pickle
 
-__all__ = ['train100', 'test100', 'train10', 'test10']
-
 URL_PREFIX = 'https://dataset.bj.bcebos.com/cifar/'
 CIFAR10_URL = URL_PREFIX + 'cifar-10-python.tar.gz'
 CIFAR10_MD5 = 'c58f30108f718f92721af3b95e74349a'

python/paddle/dataset/common.py

Lines changed: 0 additions & 8 deletions

@@ -26,14 +26,6 @@
 import six.moves.cPickle as pickle
 import glob
 
-__all__ = [
-    'DATA_HOME',
-    'download',
-    'md5file',
-    'split',
-    'cluster_files_reader',
-]
-
 HOME = os.path.expanduser('~')
 DATA_HOME = os.path.join(HOME, '.cache', 'paddle', 'dataset')
 

python/paddle/dataset/conll05.py

Lines changed: 0 additions & 2 deletions

@@ -30,8 +30,6 @@
 import paddle.utils.deprecated as deprecated
 from six.moves import zip, range
 
-__all__ = ['test, get_dict', 'get_embedding']
-
 DATA_URL = 'http://paddlemodels.bj.bcebos.com/conll05st/conll05st-tests.tar.gz'
 DATA_MD5 = '387719152ae52d60422c016e92a742fc'
 WORDDICT_URL = 'http://paddlemodels.bj.bcebos.com/conll05st%2FwordDict.txt'

python/paddle/dataset/flowers.py

Lines changed: 6 additions & 2 deletions

@@ -35,7 +35,12 @@
 import functools
 from .common import download
 import tarfile
-from paddle.dataset.image import *
+
+from paddle.dataset.image import load_image_bytes
+from paddle.dataset.image import load_image
+from paddle.dataset.image import simple_transform
+from paddle.dataset.image import batch_images_from_tar
+
 from paddle.reader import map_readers, xmap_readers
 from paddle import compat as cpt
 import paddle.utils.deprecated as deprecated

@@ -45,7 +50,6 @@
 import six
 from six.moves import cPickle as pickle
 from paddle.utils import try_import
-__all__ = ['train', 'test', 'valid']
 
 DATA_URL = 'http://paddlemodels.bj.bcebos.com/flowers/102flowers.tgz'
 LABEL_URL = 'http://paddlemodels.bj.bcebos.com/flowers/imagelabels.mat'
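Because this same commit deletes __all__ from python/paddle/dataset/image.py, the old wildcard import would no longer be restricted to a curated list: without __all__, a star import binds every top-level name that does not start with an underscore, including image.py's own imports. Naming the four helpers flowers.py actually uses keeps its namespace stable. A hedged two-line contrast (illustrative, not part of the commit):

from paddle.dataset.image import *           # old style: scope now depends on image.py's top-level names
from paddle.dataset.image import load_image  # new style: only the helpers flowers.py calls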

python/paddle/dataset/image.py

Lines changed: 0 additions & 6 deletions

@@ -58,12 +58,6 @@
 import tarfile
 import six.moves.cPickle as pickle
 
-__all__ = [
-    "load_image_bytes", "load_image", "resize_short", "to_chw", "center_crop",
-    "random_crop", "left_right_flip", "simple_transform", "load_and_transform",
-    "batch_images_from_tar"
-]
-
 
 def _check_cv2():
     if cv2 is None:

python/paddle/dataset/imdb.py

Lines changed: 0 additions & 2 deletions

@@ -30,8 +30,6 @@
 import string
 import six
 
-__all__ = ['build_dict', 'train', 'test']
-
 #URL = 'http://ai.stanford.edu/%7Eamaas/data/sentiment/aclImdb_v1.tar.gz'
 URL = 'https://dataset.bj.bcebos.com/imdb%2FaclImdb_v1.tar.gz'
 MD5 = '7c2ac02c03563afcf9b574c7e56c153a'

python/paddle/dataset/imikolov.py

Lines changed: 0 additions & 2 deletions

@@ -27,8 +27,6 @@
 import tarfile
 import six
 
-__all__ = ['train', 'test', 'build_dict']
-
 #URL = 'http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz'
 URL = 'https://dataset.bj.bcebos.com/imikolov%2Fsimple-examples.tgz'
 MD5 = '30177ea32e27c525793142b6bf2c8e2d'

python/paddle/dataset/mnist.py

Lines changed: 0 additions & 1 deletion

@@ -26,7 +26,6 @@
 import numpy
 import struct
 from six.moves import range
-__all__ = ['train', 'test']
 
 URL_PREFIX = 'https://dataset.bj.bcebos.com/mnist/'
 TEST_IMAGE_URL = URL_PREFIX + 't10k-images-idx3-ubyte.gz'
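Across the dataset modules above, only the module-level __all__ lists are removed; the reader functions themselves are unchanged and remain callable by their full paths, they are simply no longer advertised as paddle 2.0 public APIs. A hedged sketch, assuming mnist.train() still returns a reader creator that yields (image, label) samples as in earlier Paddle releases:

import paddle.dataset.mnist as mnist

# Assumption: train() downloads MNIST on first use and returns a callable
# "reader creator"; invoking it yields (image, label) pairs.
train_reader = mnist.train()
image, label = next(train_reader())
print(image.shape, label)  # expected: a flattened 784-element image and an integer label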
