// securityos/node_modules/music-metadata/lib/flac/FlacParser.js
// 176 lines, 7.7 KiB, JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FlacParser = void 0;
const token_types_1 = require("token-types");
const debug_1 = require("debug");
const util = require("../common/Util");
const Vorbis_1 = require("../ogg/vorbis/Vorbis");
const AbstractID3Parser_1 = require("../id3v2/AbstractID3Parser");
const FourCC_1 = require("../common/FourCC");
const VorbisParser_1 = require("../ogg/vorbis/VorbisParser");
const VorbisDecoder_1 = require("../ogg/vorbis/VorbisDecoder");
const debug = (0, debug_1.default)('music-metadata:parser:FLAC');
/**
* FLAC supports up to 128 kinds of metadata blocks; currently the following are defined:
* ref: https://xiph.org/flac/format.html#metadata_block
*/
var BlockType;
(function (BlockType) {
    // FLAC metadata block types in spec order; the array index is the
    // numeric enum value. Both forward (name -> number) and reverse
    // (number -> name) mappings are installed, mirroring a TypeScript
    // numeric enum.
    const names = [
        'STREAMINFO', // 0
        'PADDING', // 1
        'APPLICATION', // 2
        'SEEKTABLE', // 3
        'VORBIS_COMMENT', // 4
        'CUESHEET', // 5
        'PICTURE' // 6
    ];
    names.forEach((name, value) => {
        BlockType[BlockType[name] = value] = name;
    });
})(BlockType || (BlockType = {}));
class FlacParser extends AbstractID3Parser_1.AbstractID3Parser {
    constructor() {
        super(...arguments);
        // Accumulated byte count of all PADDING blocks encountered.
        this.padding = 0;
    }
    /**
     * Initialize parser with output (metadata), input (tokenizer) & parsing options (options).
     * @param {INativeMetadataCollector} metadata Output
     * @param {ITokenizer} tokenizer Input
     * @param {IOptions} options Parsing options
     * @returns {FlacParser} this, enabling fluent use
     */
    init(metadata, tokenizer, options) {
        super.init(metadata, tokenizer, options);
        // Vorbis comments in FLAC reuse the Ogg/Vorbis tag mapper.
        this.vorbisParser = new VorbisParser_1.VorbisParser(metadata, options);
        return this;
    }
    /**
     * Invoked by AbstractID3Parser once any leading ID3v2 tag is consumed.
     * Validates the 'fLaC' preamble, then walks the metadata-block chain
     * until the block flagged as last; finally derives the average bitrate
     * from the remaining (audio frame) bytes when size & duration are known.
     * @throws {Error} If the stream does not start with the FLAC preamble.
     */
    async postId3v2Parse() {
        const fourCC = await this.tokenizer.readToken(FourCC_1.FourCcToken);
        if (fourCC.toString() !== 'fLaC') {
            throw new Error('Invalid FLAC preamble');
        }
        let blockHeader;
        do {
            // Read block header
            blockHeader = await this.tokenizer.readToken(Metadata.BlockHeader);
            // Parse block data
            await this.parseDataBlock(blockHeader);
        } while (!blockHeader.lastBlock);
        if (this.tokenizer.fileInfo.size && this.metadata.format.duration) {
            // Everything after the metadata blocks is audio frame data.
            const dataSize = this.tokenizer.fileInfo.size - this.tokenizer.position;
            this.metadata.setFormat('bitrate', 8 * dataSize / this.metadata.format.duration);
        }
    }
    /**
     * Dispatch a single metadata block to its dedicated parser; block types
     * without one (PADDING, APPLICATION, SEEKTABLE, CUESHEET, unknown) are
     * skipped by advancing the tokenizer past the block body.
     * Fixed: dropped the redundant no-handler `.then()` calls — `p.then()`
     * is behaviorally identical to `p` and only obscured the control flow.
     * @param blockHeader Parsed METADATA_BLOCK_HEADER ({type, length, lastBlock})
     * @returns {Promise<void>} Resolves when the block body has been consumed.
     */
    async parseDataBlock(blockHeader) {
        debug(`blockHeader type=${blockHeader.type}, length=${blockHeader.length}`);
        switch (blockHeader.type) {
            case BlockType.STREAMINFO:
                return this.parseBlockStreamInfo(blockHeader.length);
            case BlockType.PADDING:
                this.padding += blockHeader.length;
                break;
            case BlockType.APPLICATION:
                break;
            case BlockType.SEEKTABLE:
                break;
            case BlockType.VORBIS_COMMENT:
                return this.parseComment(blockHeader.length);
            case BlockType.CUESHEET:
                break;
            case BlockType.PICTURE:
                return this.parsePicture(blockHeader.length);
            default:
                this.metadata.addWarning('Unknown block type: ' + blockHeader.type);
        }
        // Ignore data block
        return this.tokenizer.ignore(blockHeader.length);
    }
    /**
     * Parse STREAMINFO
     * Sets container/codec/channel/sample-rate format fields; duration is
     * only set when the stream declares a non-zero total-sample count.
     * @param {number} dataLen Declared block length; must equal the fixed 34-byte STREAMINFO size.
     * @throws {Error} On an unexpected STREAMINFO block length.
     */
    async parseBlockStreamInfo(dataLen) {
        if (dataLen !== Metadata.BlockStreamInfo.len)
            throw new Error('Unexpected block-stream-info length');
        const streamInfo = await this.tokenizer.readToken(Metadata.BlockStreamInfo);
        this.metadata.setFormat('container', 'FLAC');
        this.metadata.setFormat('codec', 'FLAC');
        this.metadata.setFormat('lossless', true);
        this.metadata.setFormat('numberOfChannels', streamInfo.channels);
        this.metadata.setFormat('bitsPerSample', streamInfo.bitsPerSample);
        this.metadata.setFormat('sampleRate', streamInfo.sampleRate);
        if (streamInfo.totalSamples > 0) {
            this.metadata.setFormat('duration', streamInfo.totalSamples / streamInfo.sampleRate);
        }
    }
    /**
     * Parse VORBIS_COMMENT
     * Ref: https://www.xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-640004.2.3
     * Reads the whole comment block, skips the vendor string, then feeds
     * each user comment to the Vorbis tag mapper.
     * @param {number} dataLen Length of the comment block body in bytes.
     */
    async parseComment(dataLen) {
        const data = await this.tokenizer.readToken(new token_types_1.Uint8ArrayType(dataLen));
        const decoder = new VorbisDecoder_1.VorbisDecoder(data, 0);
        decoder.readStringUtf8(); // vendor (skip)
        const commentListLength = decoder.readInt32();
        for (let i = 0; i < commentListLength; i++) {
            const tag = decoder.parseUserComment();
            this.vorbisParser.addTag(tag.key, tag.value);
        }
    }
    /**
     * Parse a PICTURE block, or skip it when covers are disabled via options.
     * @param {number} dataLen Length of the picture block body in bytes.
     */
    async parsePicture(dataLen) {
        if (this.options.skipCovers) {
            return this.tokenizer.ignore(dataLen);
        }
        else {
            const picture = await this.tokenizer.readToken(new Vorbis_1.VorbisPictureToken(dataLen));
            this.vorbisParser.addTag('METADATA_BLOCK_PICTURE', picture);
        }
    }
}
exports.FlacParser = FlacParser;
class Metadata {
}
/**
 * METADATA_BLOCK_HEADER token (4 bytes).
 * Ref: https://xiph.org/flac/format.html#metadata_block
 */
Metadata.BlockHeader = {
    len: 4,
    get: (buf, off) => {
        // First byte: bit 7 flags the last metadata block, bits 6..0 carry
        // the block type. The next three bytes are the big-endian length of
        // the block body.
        const lastBlock = util.getBit(buf, off, 7);
        const type = util.getBitAllignedNumber(buf, off, 1, 7);
        const length = token_types_1.UINT24_BE.get(buf, off + 1);
        return { lastBlock, type, length };
    }
};
/**
 * METADATA_BLOCK_DATA: STREAMINFO token (fixed 34 bytes).
 * Ref: https://xiph.org/flac/format.html#metadata_block_streaminfo
 * Fixed: `maximumBlockSize` was divided by 1000, contradicting both its own
 * comment and the FLAC spec — the field is a raw 16-bit sample count, read
 * the same way as `minimumBlockSize`.
 */
Metadata.BlockStreamInfo = {
    len: 34,
    get: (buf, off) => {
        return {
            // The minimum block size (in samples) used in the stream.
            minimumBlockSize: token_types_1.UINT16_BE.get(buf, off),
            // The maximum block size (in samples) used in the stream.
            // (Minimum blocksize == maximum blocksize) implies a fixed-blocksize stream.
            maximumBlockSize: token_types_1.UINT16_BE.get(buf, off + 2),
            // The minimum frame size (in bytes) used in the stream.
            // May be 0 to imply the value is not known.
            minimumFrameSize: token_types_1.UINT24_BE.get(buf, off + 4),
            // The maximum frame size (in bytes) used in the stream.
            // May be 0 to imply the value is not known.
            maximumFrameSize: token_types_1.UINT24_BE.get(buf, off + 7),
            // Sample rate in Hz. Though 20 bits are available,
            // the maximum sample rate is limited by the structure of frame headers to 655350Hz.
            // Also, a value of 0 is invalid.
            // The 20-bit field occupies the top bits of a 24-bit read, hence the >> 4.
            sampleRate: token_types_1.UINT24_BE.get(buf, off + 10) >> 4,
            // probably slower: sampleRate: common.getBitAllignedNumber(buf, off + 10, 0, 20),
            // (number of channels)-1. FLAC supports from 1 to 8 channels
            channels: util.getBitAllignedNumber(buf, off + 12, 4, 3) + 1,
            // (bits per sample)-1.
            // FLAC supports from 4 to 32 bits per sample. Currently the reference encoder and decoders only support up to 24 bits per sample.
            bitsPerSample: util.getBitAllignedNumber(buf, off + 12, 7, 5) + 1,
            // Total samples in stream.
            // 'Samples' means inter-channel sample, i.e. one second of 44.1Khz audio will have 44100 samples regardless of the number of channels.
            // A value of zero here means the number of total samples is unknown.
            totalSamples: util.getBitAllignedNumber(buf, off + 13, 4, 36),
            // the MD5 hash of the file (see notes for usage... it's a littly tricky)
            fileMD5: new token_types_1.Uint8ArrayType(16).get(buf, off + 18)
        };
    }
};