#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Convert JSONL files to Parquet format with support for Indic languages.
This script discovers all JSONL files in a specified language folder and
converts them to Parquet format using the HuggingFace datasets library.
"""
import os
import sys
import json
import logging
import argparse
from pathlib import Path
from typing import Dict, List, Any, Optional
import datasets
from datasets import Dataset
# Configure logging: messages go both to stderr and to a log file created in
# the current working directory, so batch runs can be audited afterwards.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler('convert_to_parquet.log')
    ]
)
logger = logging.getLogger(__name__)

# List of supported language folders (folder names double as language codes;
# presumably ISO 639-1 — ar, bn, gu, ... — verify against the dataset layout).
SUPPORTED_LANGUAGES = ['ar', 'bn', 'gu', 'hi', 'kn', 'ml', 'mr', 'pa', 'ta', 'te', 'ur']

# Required fields in each JSON record; records missing any of these are
# dropped during validation (see is_valid_json_record / load_and_validate_jsonl).
REQUIRED_FIELDS = ['en', 'translation', 'target_lang', 'domain', 'complexity']
def validate_language_folder(folder_path: str) -> bool:
    """
    Validate that the provided language folder exists and is supported.

    Args:
        folder_path: Path to the language folder

    Returns:
        bool: True if the directory exists (a warning is logged when the
        folder's language code is not in SUPPORTED_LANGUAGES), False if the
        directory is missing.
    """
    if not os.path.isdir(folder_path):
        logger.error(f"Directory does not exist: {folder_path}")
        return False
    # Extract the language code from the folder path. normpath strips any
    # trailing separator first: without it, basename('ar/') would be '' and
    # a perfectly valid folder would always trigger the warning below.
    lang_code = os.path.basename(os.path.normpath(folder_path))
    if lang_code not in SUPPORTED_LANGUAGES:
        logger.warning(f"Language '{lang_code}' is not in the list of supported languages: {SUPPORTED_LANGUAGES}")
        # We'll allow processing anyway but warn the user
    return True
def discover_jsonl_files(folder_path: str) -> List[str]:
    """
    Recursively collect every JSONL file under the given folder.

    Args:
        folder_path: Path to the language folder

    Returns:
        List[str]: Full paths of all files whose name ends in '.jsonl',
        in os.walk traversal order.
    """
    found = [
        os.path.join(dirpath, name)
        for dirpath, _, filenames in os.walk(folder_path)
        for name in filenames
        if name.endswith('.jsonl')
    ]
    logger.info(f"Found {len(found)} JSONL files in {folder_path}")
    return found
def is_valid_json_record(record: Dict[str, Any]) -> bool:
    """
    Check that a JSON record carries every field listed in REQUIRED_FIELDS.

    Args:
        record: Parsed JSON record to check

    Returns:
        bool: True when all required keys are present, False otherwise
    """
    return all(field in record for field in REQUIRED_FIELDS)
def load_and_validate_jsonl(file_path: str) -> Optional[List[Dict[str, Any]]]:
    """
    Load a JSONL file line by line, keeping only records that parse and
    contain all required fields.

    Args:
        file_path: Path to the JSONL file

    Returns:
        Optional[List[Dict[str, Any]]]: The valid records (possibly empty),
        or None if the file itself could not be read.
    """
    records: List[Dict[str, Any]] = []
    bad = 0
    try:
        with open(file_path, 'r', encoding='utf-8') as fh:
            for lineno, raw in enumerate(fh, 1):
                try:
                    text = raw.strip()
                    if not text:
                        # Blank lines are tolerated silently.
                        continue
                    parsed = json.loads(text)
                    if is_valid_json_record(parsed):
                        records.append(parsed)
                    else:
                        logger.warning(f"Line {lineno} in {file_path} is missing required fields")
                        bad += 1
                except json.JSONDecodeError:
                    logger.warning(f"Failed to parse JSON at line {lineno} in {file_path}")
                    bad += 1
                except Exception as e:
                    # Catch-all so one pathological line never aborts the file.
                    logger.warning(f"Error processing line {lineno} in {file_path}: {str(e)}")
                    bad += 1
        if bad > 0:
            logger.warning(f"Found {bad} invalid records in {file_path}")
        logger.info(f"Successfully loaded {len(records)} valid records from {file_path}")
        return records
    except Exception as e:
        logger.error(f"Failed to process file {file_path}: {str(e)}")
        return None
def convert_jsonl_to_parquet(jsonl_file: str) -> bool:
    """
    Convert a single JSONL file to a Parquet file written next to it.

    Args:
        jsonl_file: Path to the JSONL file

    Returns:
        bool: True if conversion was successful, False otherwise
    """
    try:
        logger.info(f"Processing {jsonl_file}")
        # Load and validate the JSONL file
        records = load_and_validate_jsonl(jsonl_file)
        if not records:
            logger.error(f"No valid records found in {jsonl_file}")
            return False
        # Swap only the final extension. The previous
        # jsonl_file.replace('.jsonl', '.parquet') rewrote EVERY occurrence
        # of '.jsonl' in the path (e.g. a parent directory named 'x.jsonl'),
        # producing a broken output path.
        output_file = os.path.splitext(jsonl_file)[0] + '.parquet'
        # Build a HuggingFace Dataset from the records and save as Parquet
        dataset = Dataset.from_list(records)
        dataset.to_parquet(output_file)
        logger.info(f"Successfully converted {jsonl_file} to {output_file}")
        return True
    except Exception as e:
        logger.error(f"Failed to convert {jsonl_file} to Parquet: {str(e)}")
        return False
def process_language_folder(folder_path: str) -> Dict[str, int]:
    """
    Convert every JSONL file found in a language folder.

    Args:
        folder_path: Path to the language folder

    Returns:
        Dict[str, int]: Counts under the keys 'total', 'success' and
        'failed' describing the conversion run.
    """
    stats = {'total': 0, 'success': 0, 'failed': 0}

    if not validate_language_folder(folder_path):
        return stats

    jsonl_files = discover_jsonl_files(folder_path)
    stats['total'] = len(jsonl_files)
    if not jsonl_files:
        logger.warning(f"No JSONL files found in {folder_path}")
        return stats

    for path in jsonl_files:
        # Bucket each file under 'success' or 'failed' as we go.
        outcome = 'success' if convert_jsonl_to_parquet(path) else 'failed'
        stats[outcome] += 1
    return stats
def main():
    """
    Parse the command line, run the conversion, and report the outcome.

    Returns:
        int: 0 when every file converted, 1 when any file failed.
    """
    parser = argparse.ArgumentParser(
        description='Convert JSONL files to Parquet format with support for Indic languages.'
    )
    parser.add_argument(
        'language_folder',
        type=str,
        help='Path to the language folder (e.g., ar, bn, gu, hi, kn, ml, mr, pa, ta, te, ur)'
    )
    args = parser.parse_args()

    logger.info("Starting conversion process")
    stats = process_language_folder(args.language_folder)
    logger.info(f"Conversion completed. Stats: {stats}")

    exit_code = 0
    if stats['failed'] > 0:
        logger.warning(f"Failed to convert {stats['failed']} out of {stats['total']} files.")
        exit_code = 1
    return exit_code


if __name__ == '__main__':
    sys.exit(main())