@@ -60,8 +60,7 @@ def hf_token(hf_api: HfApi):
@pytest.fixture
def cleanup_repo(hf_api):
    """Provide a callable that removes a dataset repository from the Hub.

    The returned function takes a full ``repo_id`` (``"namespace/name"``) and
    deletes it using the CI user's token, so tests can register ad-hoc cleanup.
    """

    def _delete(repo_id):
        # repo_id is passed through as-is; delete_repo accepts the full
        # "namespace/name" identifier directly (no manual split needed).
        delete_repo(hf_api, repo_id, token=CI_HUB_USER_TOKEN, repo_type="dataset")

    return _delete
6766
@@ -81,8 +80,8 @@ def _temporary_repo(repo_id):
8180@pytest .fixture (scope = "session" )
8281def hf_private_dataset_repo_txt_data_ (hf_api : HfApi , hf_token , text_file ):
8382 repo_name = f"repo_txt_data-{ int (time .time () * 10e3 )} "
84- create_repo (hf_api , repo_name , token = hf_token , organization = CI_HUB_USER , repo_type = "dataset" , private = True )
8583 repo_id = f"{ CI_HUB_USER } /{ repo_name } "
84+ create_repo (hf_api , repo_id , token = hf_token , repo_type = "dataset" , private = True )
8685 hf_api .upload_file (
8786 token = hf_token ,
8887 path_or_fileobj = str (text_file ),
@@ -92,7 +91,7 @@ def hf_private_dataset_repo_txt_data_(hf_api: HfApi, hf_token, text_file):
9291 )
9392 yield repo_id
9493 try :
95- delete_repo (hf_api , repo_name , token = hf_token , organization = CI_HUB_USER , repo_type = "dataset" )
94+ delete_repo (hf_api , repo_id , token = hf_token , repo_type = "dataset" )
9695 except (requests .exceptions .HTTPError , ValueError ): # catch http error and token invalid error
9796 pass
9897
@@ -107,8 +106,8 @@ def hf_private_dataset_repo_txt_data(hf_private_dataset_repo_txt_data_):
107106@pytest .fixture (scope = "session" )
108107def hf_private_dataset_repo_zipped_txt_data_ (hf_api : HfApi , hf_token , zip_csv_with_dir_path ):
109108 repo_name = f"repo_zipped_txt_data-{ int (time .time () * 10e3 )} "
110- create_repo (hf_api , repo_name , token = hf_token , organization = CI_HUB_USER , repo_type = "dataset" , private = True )
111109 repo_id = f"{ CI_HUB_USER } /{ repo_name } "
110+ create_repo (hf_api , repo_id , token = hf_token , repo_type = "dataset" , private = True )
112111 hf_api .upload_file (
113112 token = hf_token ,
114113 path_or_fileobj = str (zip_csv_with_dir_path ),
@@ -118,7 +117,7 @@ def hf_private_dataset_repo_zipped_txt_data_(hf_api: HfApi, hf_token, zip_csv_wi
118117 )
119118 yield repo_id
120119 try :
121- delete_repo (hf_api , repo_name , token = hf_token , organization = CI_HUB_USER , repo_type = "dataset" )
120+ delete_repo (hf_api , repo_id , token = hf_token , repo_type = "dataset" )
122121 except (requests .exceptions .HTTPError , ValueError ): # catch http error and token invalid error
123122 pass
124123
@@ -133,8 +132,8 @@ def hf_private_dataset_repo_zipped_txt_data(hf_private_dataset_repo_zipped_txt_d
133132@pytest .fixture (scope = "session" )
134133def hf_private_dataset_repo_zipped_img_data_ (hf_api : HfApi , hf_token , zip_image_path ):
135134 repo_name = f"repo_zipped_img_data-{ int (time .time () * 10e3 )} "
136- create_repo (hf_api , repo_name , token = hf_token , organization = CI_HUB_USER , repo_type = "dataset" , private = True )
137135 repo_id = f"{ CI_HUB_USER } /{ repo_name } "
136+ create_repo (hf_api , repo_id , token = hf_token , repo_type = "dataset" , private = True )
138137 hf_api .upload_file (
139138 token = hf_token ,
140139 path_or_fileobj = str (zip_image_path ),
@@ -144,7 +143,7 @@ def hf_private_dataset_repo_zipped_img_data_(hf_api: HfApi, hf_token, zip_image_
144143 )
145144 yield repo_id
146145 try :
147- delete_repo (hf_api , repo_name , token = hf_token , organization = CI_HUB_USER , repo_type = "dataset" )
146+ delete_repo (hf_api , repo_id , token = hf_token , repo_type = "dataset" )
148147 except (requests .exceptions .HTTPError , ValueError ): # catch http error and token invalid error
149148 pass
150149
0 commit comments