#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Convert JSONL files to Parquet format with support for Indic languages.
This script discovers all JSONL files in a specified language folder and
converts them to Parquet format using the HuggingFace datasets library.
"""
import os
import sys
import json
import logging
import argparse
from pathlib import Path
from typing import Dict, List, Any, Optional
from datasets import Dataset
# Configure logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
        # Explicit UTF-8 so log lines containing Indic-script paths or text
        # do not fail on platforms with a non-UTF-8 default encoding
        logging.FileHandler('convert_to_parquet.log', encoding='utf-8')
    ]
)
logger = logging.getLogger(__name__)
# List of supported language folders
SUPPORTED_LANGUAGES = ['ar', 'bn', 'gu', 'hi', 'kn', 'ml', 'mr', 'pa', 'ta', 'te', 'ur']
# Required fields in each JSON record
REQUIRED_FIELDS = ['en', 'translation', 'target_lang', 'domain', 'complexity']
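# Illustrative record shape (the values are hypothetical, not taken from
# the dataset):
# {"en": "The cell is the basic unit of life.",
#  "translation": "कोशिका जीवन की मूल इकाई है।",
#  "target_lang": "hi", "domain": "science", "complexity": "medium"}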
def validate_language_folder(folder_path: str) -> bool:
"""
Validate if the provided language folder exists and is supported.
Args:
folder_path: Path to the language folder
Returns:
bool: True if valid, False otherwise
"""
if not os.path.isdir(folder_path):
logger.error(f"Directory does not exist: {folder_path}")
return False
    # Extract the language code, normalizing first so that a trailing
    # slash (e.g. 'hi/') does not yield an empty basename
    lang_code = os.path.basename(os.path.normpath(folder_path))
if lang_code not in SUPPORTED_LANGUAGES:
logger.warning(f"Language '{lang_code}' is not in the list of supported languages: {SUPPORTED_LANGUAGES}")
# We'll allow processing anyway but warn the user
return True
def discover_jsonl_files(folder_path: str) -> List[str]:
"""
Discover all JSONL files in the specified folder.
Args:
folder_path: Path to the language folder
Returns:
List[str]: List of paths to JSONL files
"""
jsonl_files = []
for root, _, files in os.walk(folder_path):
for file in files:
if file.endswith('.jsonl'):
jsonl_files.append(os.path.join(root, file))
logger.info(f"Found {len(jsonl_files)} JSONL files in {folder_path}")
return jsonl_files
def is_valid_json_record(record: Dict[str, Any]) -> bool:
"""
Validate that a JSON record contains all required fields.
Args:
record: JSON record to validate
Returns:
bool: True if valid, False otherwise
"""
    return all(field in record for field in REQUIRED_FIELDS)
def load_and_validate_jsonl(file_path: str) -> Optional[List[Dict[str, Any]]]:
"""
Load a JSONL file and validate each record.
Args:
file_path: Path to the JSONL file
Returns:
Optional[List[Dict[str, Any]]]: List of valid JSON records or None if errors occurred
"""
valid_records = []
invalid_count = 0
try:
with open(file_path, 'r', encoding='utf-8') as f:
for i, line in enumerate(f, 1):
try:
line = line.strip()
if not line:
continue
record = json.loads(line)
if is_valid_json_record(record):
valid_records.append(record)
else:
logger.warning(f"Line {i} in {file_path} is missing required fields")
invalid_count += 1
except json.JSONDecodeError:
logger.warning(f"Failed to parse JSON at line {i} in {file_path}")
invalid_count += 1
except Exception as e:
logger.warning(f"Error processing line {i} in {file_path}: {str(e)}")
invalid_count += 1
if invalid_count > 0:
logger.warning(f"Found {invalid_count} invalid records in {file_path}")
logger.info(f"Successfully loaded {len(valid_records)} valid records from {file_path}")
return valid_records
except Exception as e:
logger.error(f"Failed to process file {file_path}: {str(e)}")
return None
def convert_jsonl_to_parquet(jsonl_file: str) -> bool:
"""
Convert a JSONL file to Parquet format.
Args:
jsonl_file: Path to the JSONL file
Returns:
bool: True if conversion was successful, False otherwise
"""
try:
logger.info(f"Processing {jsonl_file}")
# Load and validate the JSONL file
records = load_and_validate_jsonl(jsonl_file)
if not records:
logger.error(f"No valid records found in {jsonl_file}")
return False
        # Derive the output path by swapping only the file extension;
        # str.replace('.jsonl', ...) would also rewrite '.jsonl' if it
        # appeared elsewhere in the path
        output_file = str(Path(jsonl_file).with_suffix('.parquet'))
# Create the dataset
dataset = Dataset.from_list(records)
# Save as Parquet
dataset.to_parquet(output_file)
logger.info(f"Successfully converted {jsonl_file} to {output_file}")
return True
except Exception as e:
logger.error(f"Failed to convert {jsonl_file} to Parquet: {str(e)}")
return False
def process_language_folder(folder_path: str) -> Dict[str, int]:
"""
Process all JSONL files in a language folder.
Args:
folder_path: Path to the language folder
Returns:
Dict[str, int]: Statistics about the conversion process
"""
stats = {
'total': 0,
'success': 0,
'failed': 0
}
if not validate_language_folder(folder_path):
return stats
jsonl_files = discover_jsonl_files(folder_path)
stats['total'] = len(jsonl_files)
if not jsonl_files:
logger.warning(f"No JSONL files found in {folder_path}")
return stats
for jsonl_file in jsonl_files:
success = convert_jsonl_to_parquet(jsonl_file)
if success:
stats['success'] += 1
else:
stats['failed'] += 1
return stats
def main():
"""
Main entry point for the script.
"""
parser = argparse.ArgumentParser(
description='Convert JSONL files to Parquet format with support for Indic languages.'
)
parser.add_argument(
'language_folder',
type=str,
help='Path to the language folder (e.g., ar, bn, gu, hi, kn, ml, mr, pa, ta, te, ur)'
)
args = parser.parse_args()
logger.info("Starting conversion process")
stats = process_language_folder(args.language_folder)
logger.info(f"Conversion completed. Stats: {stats}")
if stats['failed'] > 0:
logger.warning(f"Failed to convert {stats['failed']} out of {stats['total']} files.")
return 1
return 0
if __name__ == '__main__':
sys.exit(main())
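# A quick post-conversion sanity check (a sketch only; the file name is
# illustrative and assumes pandas and pyarrow are installed alongside
# the datasets library):
#
#   import pandas as pd
#   df = pd.read_parquet('hi/book1.parquet')
#   print(df[['en', 'translation']].head())
#
# The column names mirror REQUIRED_FIELDS, since Dataset.from_list infers
# the schema from the record keys.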