Showing 5 changed files with 174 additions and 60 deletions.
@@ -0,0 +1,83 @@
import { PassportData } from '../../../common/src/utils/types';
import { findSubarrayIndex, formatMrz, hash } from '../../../common/src/utils/utils';
import { parseCertificate } from '../../../common/src/utils/certificates/handleCertificate';

export interface PassportMetadata {
  dataGroups: string;
  dg1HashFunction: string;
  dg1HashOffset: number;
  eContentSize: number;
  eContentHashFunction: string;
  eContentHashOffset: number;
  signedAttrSize: number;
  signedAttrHashFunction: string;
  countryCode?: string;
}

export function findHashSizeOfEContent(eContent: number[], signedAttr: number[]) {
  const allHashes = ['sha512', 'sha384', 'sha256', 'sha1'];
  for (const hashFunction of allHashes) {
    const hashValue = hash(hashFunction, eContent);
    const hashOffset = findSubarrayIndex(signedAttr, hashValue);
    if (hashOffset !== -1) {
      return { hashFunction, offset: hashOffset };
    }
  }
  return { hashFunction: 'unknown', offset: -1 };
}

export function findDG1HashInEContent(mrz: string, eContent: number[]): { hash: number[], hashFunction: string } | null {
  const hashFunctions = ['sha512', 'sha384', 'sha256', 'sha1'];
  const formattedMrz = formatMrz(mrz);

  for (const hashFunction of hashFunctions) {
    const hashValue = hash(hashFunction, formattedMrz);
    const hashOffset = findSubarrayIndex(eContent, hashValue);

    if (hashOffset !== -1) {
      return { hash: hashValue, hashFunction };
    }
  }
  return null;
}

export function getCountryCodeFromMrz(mrz: string): string {
  return mrz.substring(2, 5);
}

export function parsePassportData(passportData: PassportData): PassportMetadata {
  // Extract DG1 hash info
  const dg1HashInfo = passportData.mrz ?
    findDG1HashInEContent(passportData.mrz, passportData.eContent) :
    null;

  // Use extracted DG1 hash if found, otherwise use provided dg1Hash
  const dg1Hash = dg1HashInfo?.hash || passportData.dg1Hash;
  const dg1HashFunction = dg1HashInfo?.hashFunction || 'unknown';

  const dg1HashOffset = dg1Hash
    ? findSubarrayIndex(
        passportData.eContent,
        dg1Hash.map(byte => byte > 127 ? byte - 256 : byte)
      )
    : 0;

  const { hashFunction: eContentHashFunction, offset: eContentHashOffset } =
    findHashSizeOfEContent(passportData.eContent, passportData.signedAttr);

  const dscHashFunction = passportData.dsc ?
    parseCertificate(passportData.dsc).hashFunction :
    'unknown';

  return {
    dataGroups: passportData.dgPresents?.toString().split(',').map(item => item.replace('DG', '')).join(',') || 'None',
    dg1HashFunction,
    dg1HashOffset,
    eContentSize: passportData.eContent?.length || 0,
    eContentHashFunction,
    eContentHashOffset,
    signedAttrSize: passportData.signedAttr?.length || 0,
    signedAttrHashFunction: dscHashFunction,
    countryCode: passportData.mrz ? getCountryCodeFromMrz(passportData.mrz) : undefined
  };
}
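
For reference, the hash-function detection in findHashSizeOfEContent boils down to hashing eContent with each candidate algorithm and searching for the resulting digest inside signedAttr. Below is a minimal standalone sketch of that same idea, written against Node's built-in crypto module and Buffer.indexOf instead of the project's hash() and findSubarrayIndex() helpers; the names detectHashFunction, payload, and wrapper are illustrative and not part of this commit.

// Illustrative sketch only (not part of the commit): try each digest algorithm
// on the payload and look for the digest as a subarray of the wrapper, mirroring
// the loop in findHashSizeOfEContent.
import { createHash } from 'crypto';

function detectHashFunction(payload: Buffer, wrapper: Buffer): { hashFunction: string; offset: number } {
  for (const algo of ['sha512', 'sha384', 'sha256', 'sha1']) {
    const digest = createHash(algo).update(payload).digest();
    const offset = wrapper.indexOf(digest); // Buffer.indexOf performs a subarray search
    if (offset !== -1) {
      return { hashFunction: algo, offset };
    }
  }
  return { hashFunction: 'unknown', offset: -1 };
}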
@@ -1,3 +1,4 @@
.env
.env.local
outputs/
src/passport_data/passport_data/
@@ -0,0 +1,55 @@
import fs from 'fs';
import path from 'path';
import { PassportData } from '../../../common/src/utils/types';
import { parsePassportData } from '../../../app/src/utils/parsePassportData';

function parsePassportFile(filePath: string) {
  try {
    const fileContent = fs.readFileSync(filePath, 'utf8');
    const passportData = JSON.parse(fileContent) as PassportData;

    const info = parsePassportData(passportData);

    // Print the results
    console.log(`\nProcessing file: ${path.basename(filePath)}`);
    console.log('----------------------------------------');
    if (info.countryCode) console.log(`Country Code: ${info.countryCode}`);
    console.log(`Data Groups: ${info.dataGroups}`);
    console.log(`DG1 Hash Function: ${info.dg1HashFunction}`);
    console.log(`DG1 Hash Offset: ${info.dg1HashOffset}`);
    console.log(`eContent Size: ${info.eContentSize}`);
    console.log(`eContent Hash Function: ${info.eContentHashFunction}`);
    console.log(`eContent Hash Offset: ${info.eContentHashOffset}`);
    console.log(`Signed Attributes Size: ${info.signedAttrSize}`);
    console.log(`Signed Attributes Hash Function: ${info.signedAttrHashFunction}`);

  } catch (error) {
    console.error(`Error processing file ${filePath}:`, error);
  }
}

function main() {
  const directoryPath = path.join(__dirname, 'passport_data');
  console.log(directoryPath);

  try {
    const files = fs.readdirSync(directoryPath);
    const jsonFiles = files.filter(file => file.endsWith('.json'));

    if (jsonFiles.length === 0) {
      console.log('No JSON files found in the passport_data directory');
      return;
    }

    jsonFiles.forEach(file => {
      const filePath = path.join(directoryPath, file);
      parsePassportFile(filePath);
    });

  } catch (error) {
    console.error('Error reading directory:', error);
  }
}

// Execute the script
main();
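
This script expects a passport_data/ directory of PassportData JSON fixtures next to it, which the .gitignore change above keeps out of version control. Assuming ts-node (or an equivalent TypeScript runner) is available in the workspace, it could be invoked with something like npx ts-node followed by the script's path; the script's location in the repository is not shown in this view of the diff.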