Cie1 committed · Commit 436c84d · verified · 1 parent: 2585e49

Update arrow files

Files changed (1):
  1. decrypt_after_load.py +71 -41
decrypt_after_load.py CHANGED

@@ -35,6 +35,8 @@ from datasets import load_dataset, load_from_disk, Dataset
 from PIL import Image
 from typing import Dict, Any
 import os
+import multiprocessing
+
 
 def derive_key(password: str, length: int) -> bytes:
     """Derive encryption key from password using SHA-256."""
@@ -105,72 +107,95 @@ def decrypt_sample(sample: Dict[str, Any], canary: str) -> Dict[str, Any]:
 
     return decrypted_sample
 
-def decrypt_dataset(encrypted_dataset: Dataset, canary: str, output_path: str = None) -> Dataset:
+def decrypt_dataset(
+    encrypted_dataset: Dataset,
+    canary: str,
+    output_path: str = None,
+    num_proc: int = None,
+    batch_size: int = 1000,
+) -> Dataset:
     """
     Decrypt an already-loaded dataset object.
-
+
     Args:
         encrypted_dataset: Already loaded Dataset object to decrypt
         canary: Canary string used for encryption
         output_path: Path to save decrypted dataset (optional)
-
-    Returns:
-        Decrypted Dataset object
+        num_proc: Number of processes for parallel decryption (defaults to CPU count)
+        batch_size: Batch size for Dataset.map
     """
     if not isinstance(encrypted_dataset, Dataset):
         raise TypeError(f"Expected Dataset object, got {type(encrypted_dataset)}")
 
+    if num_proc is None:
+        # Leave 1 core free so your machine stays responsive
+        cpu_count = multiprocessing.cpu_count()
+        num_proc = max(1, cpu_count - 1)
+
     print(f"📊 Dataset contains {len(encrypted_dataset)} samples")
     print(f"🔧 Features: {list(encrypted_dataset.features.keys())}")
     print(f"🔑 Using canary string: {canary}")
+    print(f"🧵 Using {num_proc} processes, batch_size={batch_size}")
 
-    # Decrypt the dataset using map function for efficiency
-    print(f"🔄 Decrypting dataset...")
-
+    # Vectorized batch decryption (column-wise)
     def decrypt_batch(batch):
-        """Decrypt a batch of samples."""
-        # Get the number of samples in the batch
-        num_samples = len(batch[list(batch.keys())[0]])
-
-        # Process each sample in the batch
-        decrypted_batch = {key: [] for key in batch.keys()}
-
-        for i in range(num_samples):
-            # Extract single sample from batch
-            sample = {key: batch[key][i] for key in batch.keys()}
-
-            # Decrypt sample
-            decrypted_sample = decrypt_sample(sample, canary)
-
-            # Add to decrypted batch
-            for key in decrypted_batch.keys():
-                decrypted_batch[key].append(decrypted_sample.get(key))
+        decrypted_batch = dict(batch)  # shallow copy
 
+        text_fields = ['question', 'video_url', 'arxiv_id']
+        for field in text_fields:
+            if field in batch:
+                decrypted_batch[field] = [
+                    decrypt_text(x, canary) if x else x
+                    for x in batch[field]
+                ]
+
+        # answer: list[list[str]]
+        if 'answer' in batch:
+            decrypted_answers = []
+            for answers in batch['answer']:
+                if answers:
+                    decrypted_answers.append([
+                        decrypt_text(a, canary) if a else a
+                        for a in answers
+                    ])
+                else:
+                    decrypted_answers.append(answers)
+            decrypted_batch['answer'] = decrypted_answers
+
+        # Images are kept as-is (not encrypted)
         return decrypted_batch
-
-    # Apply decryption with batching
+
+    print("🔄 Decrypting dataset with multiprocessing...")
    decrypted_dataset = encrypted_dataset.map(
         decrypt_batch,
         batched=True,
-        batch_size=50,
-        desc="Decrypting samples"
+        batch_size=batch_size,
+        num_proc=num_proc,
+        desc="Decrypting samples",
     )
 
-    print(f"✅ Decryption completed!")
+    print("✅ Decryption completed!")
     print(f"📁 Decrypted {len(decrypted_dataset)} samples")
-    print(f"🔓 Text fields decrypted: question, answer, video_url, arxiv_id")
-    print(f"🖼️ Images: kept as-is (not encrypted in current version)")
-    print(f"📋 Metadata preserved: category, difficulty, subtask, etc.")
+    print("🔓 Text fields decrypted: question, answer, video_url, arxiv_id")
+    print("🖼️ Images: kept as-is (not encrypted in current version)")
+    print("📋 Metadata preserved: category, difficulty, subtask, etc.")
 
-    # Save if output path provided
     if output_path:
         print(f"💾 Saving decrypted dataset to: {output_path}")
         decrypted_dataset.save_to_disk(output_path)
-        print(f"✅ Saved successfully!")
+        print("✅ Saved successfully!")
 
     return decrypted_dataset
 
-def decrypt_mmsearch_plus(dataset_path: str, canary: str, output_path: str = None, from_hub: bool = False):
+
+def decrypt_mmsearch_plus(
+    dataset_path: str,
+    canary: str,
+    output_path: str = None,
+    from_hub: bool = False,
+    num_proc: int = None,
+    batch_size: int = 1000,
+):
     """
     Load and decrypt the MMSearch-Plus dataset.
 
@@ -179,6 +204,8 @@ def decrypt_mmsearch_plus(dataset_path: str, canary: str, output_path: str = Non
         canary: Canary string used for encryption
         output_path: Path to save decrypted dataset (optional)
         from_hub: Whether to load from HuggingFace Hub (default: auto-detect)
+        num_proc: Number of processes for parallel decryption
+        batch_size: Batch size for Dataset.map
     """
     # Auto-detect if loading from hub (contains "/" and doesn't exist locally)
     if not from_hub:
@@ -187,17 +214,21 @@ def decrypt_mmsearch_plus(dataset_path: str, canary: str, output_path: str = Non
     # Load the encrypted dataset
     if from_hub:
         print(f"🔓 Loading encrypted dataset from HuggingFace Hub: {dataset_path}")
-        # Load from HuggingFace Hub without trust_remote_code
         encrypted_dataset = load_dataset(dataset_path, split='train')
     else:
         print(f"🔓 Loading encrypted dataset from local path: {dataset_path}")
-        # Check if path exists
         if not Path(dataset_path).exists():
             raise ValueError(f"Dataset path does not exist: {dataset_path}")
         encrypted_dataset = load_from_disk(dataset_path)
 
-    # Use decrypt_dataset to handle the actual decryption
-    return decrypt_dataset(encrypted_dataset, canary, output_path)
+    return decrypt_dataset(
+        encrypted_dataset,
+        canary,
+        output_path=output_path,
+        num_proc=num_proc,
+        batch_size=batch_size,
+    )
+
 
 def main():
     parser = argparse.ArgumentParser(
@@ -266,4 +297,3 @@ Examples:
 
 if __name__ == "__main__":
     main()
-
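
For reference, a minimal usage sketch of the updated entry point after this commit. The function and parameter names come from the diff above; the dataset id, canary value, and output path below are placeholders, not values from this repo:

    # Hypothetical usage (placeholder dataset id, canary, and paths):
    from decrypt_after_load import decrypt_mmsearch_plus

    decrypted = decrypt_mmsearch_plus(
        "your-org/mmsearch-plus-encrypted",  # Hub id or local path; Hub use is auto-detected
        canary="YOUR-CANARY-STRING",         # must match the string used at encryption time
        output_path="./decrypted",           # optional; saved via Dataset.save_to_disk
        num_proc=4,                          # new in this commit: parallel workers for Dataset.map
        batch_size=1000,                     # new in this commit: rows per decrypt_batch call
    )
    print(f"Loaded {len(decrypted)} decrypted samples")

The switch from a per-sample decrypt_sample loop with batch_size=50 to a column-wise decrypt_batch running under num_proc worker processes is the bulk of the +71/-41 change: decryption now runs in parallel over larger Arrow batches instead of rebuilding each sample dict one row at a time.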