Merge ee5f52ebc22da8a707895fc1eab76b62268607fd into 8b28bd1a184e912b55847b4c31bd6c3e1af4049c
This commit is contained in:
commit
56ce9d4112
1
.gitignore
vendored
1
.gitignore
vendored
@ -23,3 +23,4 @@ bin/
|
||||
|
||||
# logs
|
||||
*.log
|
||||
/downloads/
|
||||
|
||||
@ -1049,6 +1049,7 @@ public class DownloadDialog extends DialogFragment
|
||||
final Stream selectedStream;
|
||||
Stream secondaryStream = null;
|
||||
final char kind;
|
||||
final boolean embedMetadata = dialogBinding.metadataSwitch.isChecked();
|
||||
int threads = dialogBinding.threads.getProgress() + 1;
|
||||
final String[] urls;
|
||||
final List<MissionRecoveryInfo> recoveryInfo;
|
||||
@ -1062,11 +1063,13 @@ public class DownloadDialog extends DialogFragment
|
||||
kind = 'a';
|
||||
selectedStream = audioStreamsAdapter.getItem(selectedAudioIndex);
|
||||
|
||||
if (selectedStream.getFormat() == MediaFormat.M4A) {
|
||||
psName = Postprocessing.ALGORITHM_M4A_NO_DASH;
|
||||
} else if (selectedStream.getFormat() == MediaFormat.WEBMA_OPUS) {
|
||||
psName = Postprocessing.ALGORITHM_OGG_FROM_WEBM_DEMUXER;
|
||||
}
|
||||
psName = switch (selectedStream.getFormat()) {
|
||||
case M4A -> Postprocessing.ALGORITHM_M4A_NO_DASH;
|
||||
case WEBMA_OPUS -> Postprocessing.ALGORITHM_OGG_FROM_WEBM_DEMUXER;
|
||||
case MP3 -> Postprocessing.ALGORITHM_MP3_METADATA;
|
||||
default -> null;
|
||||
};
|
||||
|
||||
break;
|
||||
case R.id.video_button:
|
||||
kind = 'v';
|
||||
@ -1093,6 +1096,8 @@ public class DownloadDialog extends DialogFragment
|
||||
if (secondary.getSizeInBytes() > 0 && videoSize > 0) {
|
||||
nearLength = secondary.getSizeInBytes() + videoSize;
|
||||
}
|
||||
} else if (selectedStream.getFormat() == MediaFormat.MPEG_4) {
|
||||
psName = Postprocessing.ALGORITHM_MP4_METADATA;
|
||||
}
|
||||
break;
|
||||
case R.id.subtitle_button:
|
||||
@ -1132,8 +1137,8 @@ public class DownloadDialog extends DialogFragment
|
||||
);
|
||||
}
|
||||
|
||||
DownloadManagerService.startMission(context, urls, storage, kind, threads,
|
||||
currentInfo, psName, psArgs, nearLength, new ArrayList<>(recoveryInfo));
|
||||
DownloadManagerService.startMission(context, urls, storage, kind, threads, currentInfo,
|
||||
psName, embedMetadata, psArgs, nearLength, new ArrayList<>(recoveryInfo));
|
||||
|
||||
Toast.makeText(context, getString(R.string.download_has_started),
|
||||
Toast.LENGTH_SHORT).show();
|
||||
|
||||
@ -216,9 +216,9 @@ public abstract class BaseDescriptionFragment extends BaseFragment {
|
||||
|| image.getWidth() != Image.WIDTH_UNKNOWN
|
||||
// if even the resolution level is unknown, ?x? will be shown
|
||||
|| image.getEstimatedResolutionLevel() == Image.ResolutionLevel.UNKNOWN) {
|
||||
urls.append(imageSizeToText(image.getHeight()));
|
||||
urls.append('x');
|
||||
urls.append(imageSizeToText(image.getWidth()));
|
||||
urls.append('x');
|
||||
urls.append(imageSizeToText(image.getHeight()));
|
||||
} else {
|
||||
switch (image.getEstimatedResolutionLevel()) {
|
||||
case LOW -> urls.append(getString(R.string.image_quality_low));
|
||||
|
||||
@ -1,5 +1,8 @@
|
||||
package org.schabi.newpipe.streams;
|
||||
|
||||
import android.graphics.Bitmap;
|
||||
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfo;
|
||||
import org.schabi.newpipe.streams.Mp4DashReader.Hdlr;
|
||||
import org.schabi.newpipe.streams.Mp4DashReader.Mdia;
|
||||
import org.schabi.newpipe.streams.Mp4DashReader.Mp4DashChunk;
|
||||
@ -13,7 +16,17 @@ import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import us.shandian.giga.postprocessing.Mp4MetadataHelper;
|
||||
|
||||
/**
|
||||
* MP4 muxer that builds a standard MP4 file from DASH fragmented MP4 sources.
|
||||
*
|
||||
* @see <a href="https://atomicparsley.sourceforge.net/mpeg-4files.html">
|
||||
* https://atomicparsley.sourceforge.net/mpeg-4files.html</a> for a quick summary on
|
||||
* the MP4 file format and its specification.
|
||||
* @see <a href="https://developer.apple.com/documentation/quicktime-file-format/">
|
||||
* Apple Quick Time Format Specification</a> which is the basis for MP4 file format
|
||||
* and contains detailed information about the structure of MP4 files.
|
||||
* @author kapodamy
|
||||
*/
|
||||
public class Mp4FromDashWriter {
|
||||
@ -50,13 +63,41 @@ public class Mp4FromDashWriter {
|
||||
|
||||
private final ArrayList<Integer> compatibleBrands = new ArrayList<>(5);
|
||||
|
||||
public Mp4FromDashWriter(final SharpStream... sources) throws IOException {
|
||||
|
||||
private final boolean embedMetadata;
|
||||
private final Mp4MetadataHelper metadataHelper;
|
||||
|
||||
public Mp4FromDashWriter(final boolean embedMetadata,
|
||||
final StreamInfo streamInfo,
|
||||
final Bitmap thumbnail,
|
||||
final SharpStream... sources) throws IOException {
|
||||
for (final SharpStream src : sources) {
|
||||
if (!src.canRewind() && !src.canRead()) {
|
||||
throw new IOException("All sources must be readable and allow rewind");
|
||||
}
|
||||
}
|
||||
|
||||
this.embedMetadata = embedMetadata;
|
||||
this.metadataHelper = new Mp4MetadataHelper(
|
||||
this::auxOffset,
|
||||
buffer -> {
|
||||
try {
|
||||
auxWrite(buffer);
|
||||
} catch (final IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
},
|
||||
offset -> {
|
||||
try {
|
||||
return lengthFor(offset);
|
||||
} catch (final IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
},
|
||||
streamInfo,
|
||||
thumbnail
|
||||
);
|
||||
|
||||
sourceTracks = sources;
|
||||
readers = new Mp4DashReader[sourceTracks.length];
|
||||
readersChunks = new Mp4DashChunk[readers.length];
|
||||
@ -712,10 +753,14 @@ public class Mp4FromDashWriter {
|
||||
|
||||
makeMvhd(longestTrack);
|
||||
|
||||
if (embedMetadata) {
|
||||
metadataHelper.makeUdta();
|
||||
}
|
||||
|
||||
for (int i = 0; i < tracks.length; i++) {
|
||||
if (tracks[i].trak.tkhd.matrix.length != 36) {
|
||||
throw
|
||||
new RuntimeException("bad track matrix length (expected 36) in track n°" + i);
|
||||
throw new RuntimeException(
|
||||
"bad track matrix length (expected 36) in track n°" + i);
|
||||
}
|
||||
makeTrak(i, durations[i], defaultMediaTime[i], tablesInfo[i], is64);
|
||||
}
|
||||
@ -763,7 +808,7 @@ public class Mp4FromDashWriter {
|
||||
final int mediaTime;
|
||||
|
||||
if (tracks[index].trak.edstElst == null) {
|
||||
// is a audio track ¿is edst/elst optional for audio tracks?
|
||||
// is an audio track; is edst/elst optional for audio tracks?
|
||||
mediaTime = 0x00; // ffmpeg set this value as zero, instead of defaultMediaTime
|
||||
bMediaRate = 0x00010000;
|
||||
} else {
|
||||
@ -871,33 +916,41 @@ public class Mp4FromDashWriter {
|
||||
return offset + 0x14;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Sample Group Description Box.
|
||||
*
|
||||
* <p>
|
||||
* What does it do?
|
||||
* <br>
|
||||
* The table inside of this box gives information about the
|
||||
* characteristics of sample groups. The descriptive information is any other
|
||||
* information needed to define or characterize the sample group.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* ¿is replicable this box?
|
||||
* <br>
|
||||
* NO due lacks of documentation about this box but...
|
||||
* most of m4a encoders and ffmpeg uses this box with dummy values (same values)
|
||||
* </p>
|
||||
*
|
||||
* @return byte array with the 'sgpd' box
|
||||
*/
|
||||
private byte[] makeSgpd() {
|
||||
/*
|
||||
* Sample Group Description Box
|
||||
*
|
||||
* ¿whats does?
|
||||
* the table inside of this box gives information about the
|
||||
* characteristics of sample groups. The descriptive information is any other
|
||||
* information needed to define or characterize the sample group.
|
||||
*
|
||||
* ¿is replicable this box?
|
||||
* NO due lacks of documentation about this box but...
|
||||
* most of m4a encoders and ffmpeg uses this box with dummy values (same values)
|
||||
*/
|
||||
|
||||
final ByteBuffer buffer = ByteBuffer.wrap(new byte[] {
|
||||
0x00, 0x00, 0x00, 0x1A, // box size
|
||||
0x73, 0x67, 0x70, 0x64, // "sgpd"
|
||||
0x01, 0x00, 0x00, 0x00, // box flags (unknown flag sets)
|
||||
0x72, 0x6F, 0x6C, 0x6C, // ¿¿group type??
|
||||
0x00, 0x00, 0x00, 0x02, // ¿¿??
|
||||
0x00, 0x00, 0x00, 0x01, // ¿¿??
|
||||
(byte) 0xFF, (byte) 0xFF // ¿¿??
|
||||
0x72, 0x6F, 0x6C, 0x6C, // group type??
|
||||
0x00, 0x00, 0x00, 0x02, // ??
|
||||
0x00, 0x00, 0x00, 0x01, // ??
|
||||
(byte) 0xFF, (byte) 0xFF // ??
|
||||
});
|
||||
|
||||
return buffer.array();
|
||||
}
|
||||
|
||||
|
||||
static class TablesInfo {
|
||||
int stts;
|
||||
int stsc;
|
||||
|
||||
@ -2,41 +2,100 @@ package org.schabi.newpipe.streams;
|
||||
|
||||
import static org.schabi.newpipe.MainActivity.DEBUG;
|
||||
|
||||
import android.graphics.Bitmap;
|
||||
import android.util.Log;
|
||||
import android.util.Pair;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import org.schabi.newpipe.extractor.stream.SongMetadata;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfo;
|
||||
import org.schabi.newpipe.streams.WebMReader.Cluster;
|
||||
import org.schabi.newpipe.streams.WebMReader.Segment;
|
||||
import org.schabi.newpipe.streams.WebMReader.SimpleBlock;
|
||||
import org.schabi.newpipe.streams.WebMReader.WebMTrack;
|
||||
import org.schabi.newpipe.streams.io.SharpStream;
|
||||
import org.schabi.newpipe.util.StreamInfoMetadataHelper;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Base64;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* This class is used to convert a WebM stream containing Opus or Vorbis audio
|
||||
* into an Ogg stream.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* The following specifications are used for the implementation:
|
||||
* </p>
|
||||
* <ul>
|
||||
* <li>FLAC: <a href="https://www.rfc-editor.org/rfc/rfc9639">RFC 9639</a></li>
|
||||
* <li>
|
||||
* Vorbis: <a href="https://www.xiph.org/vorbis/doc/Vorbis_I_spec.html">Vorbis I</a>.
|
||||
* <br>
|
||||
* Vorbis uses FLAC picture blocks for embedding cover art in the metadata.
|
||||
* </li>
|
||||
* <li>Opus: All specs can be found at <a href="https://opus-codec.org/docs/">
|
||||
* https://opus-codec.org/docs/</a>.
|
||||
* <a href="https://datatracker.ietf.org/doc/html/rfc7845.html">RFC7845</a>
|
||||
* defines the Ogg encapsulation for Opus streams, i.e.the container format and metadata.
|
||||
* <br>
|
||||
* Opus uses multiple Vorbis I features, e.g. the comment header format for metadata.
|
||||
* </li>
|
||||
* </ul>
|
||||
*
|
||||
* @author kapodamy
|
||||
* @author tobigr
|
||||
*/
|
||||
public class OggFromWebMWriter implements Closeable {
|
||||
private static final String TAG = OggFromWebMWriter.class.getSimpleName();
|
||||
|
||||
/**
|
||||
* No flags set.
|
||||
*/
|
||||
private static final byte FLAG_UNSET = 0x00;
|
||||
//private static final byte FLAG_CONTINUED = 0x01;
|
||||
/**
|
||||
* The packet is continued from previous the previous page.
|
||||
*/
|
||||
private static final byte FLAG_CONTINUED = 0x01;
|
||||
/**
|
||||
* BOS (beginning of stream).
|
||||
*/
|
||||
private static final byte FLAG_FIRST = 0x02;
|
||||
/**
|
||||
* EOS (end of stream).
|
||||
*/
|
||||
private static final byte FLAG_LAST = 0x04;
|
||||
|
||||
private static final byte HEADER_CHECKSUM_OFFSET = 22;
|
||||
private static final byte HEADER_SIZE = 27;
|
||||
|
||||
private static final int TIME_SCALE_NS = 1000000000;
|
||||
private static final int TIME_SCALE_NS = 1_000_000_000;
|
||||
|
||||
/**
|
||||
* The maximum size of a segment in the Ogg page, in bytes.
|
||||
* This is a fixed value defined by the Ogg specification.
|
||||
*/
|
||||
private static final int OGG_SEGMENT_SIZE = 255;
|
||||
|
||||
/**
|
||||
* The maximum size of the Opus packet in bytes, to be included in the Ogg page.
|
||||
* @see <a href="https://datatracker.ietf.org/doc/html/rfc7845.html#section-6">
|
||||
* RFC7845 6. Packet Size Limits</a>
|
||||
*/
|
||||
private static final int OPUS_MAX_PACKETS_PAGE_SIZE = 65_025;
|
||||
|
||||
private boolean done = false;
|
||||
private boolean parsed = false;
|
||||
@ -58,14 +117,27 @@ public class OggFromWebMWriter implements Closeable {
|
||||
private long webmBlockNearDuration = 0;
|
||||
|
||||
private short segmentTableSize = 0;
|
||||
private final byte[] segmentTable = new byte[255];
|
||||
private final byte[] segmentTable = new byte[OGG_SEGMENT_SIZE];
|
||||
private long segmentTableNextTimestamp = TIME_SCALE_NS;
|
||||
|
||||
private final int[] crc32Table = new int[256];
|
||||
private final boolean embedMetadata;
|
||||
private final StreamInfo streamInfo;
|
||||
private final Bitmap thumbnail;
|
||||
|
||||
public OggFromWebMWriter(@NonNull final SharpStream source, @NonNull final SharpStream target,
|
||||
@Nullable final StreamInfo streamInfo) {
|
||||
/**
|
||||
* Constructor of OggFromWebMWriter.
|
||||
* @param source
|
||||
* @param target
|
||||
* @param embedMetadata whether to embed metadata in the output Ogg stream
|
||||
* @param streamInfo the stream info
|
||||
* @param thumbnail the thumbnail bitmap used as cover art
|
||||
*/
|
||||
public OggFromWebMWriter(@NonNull final SharpStream source,
|
||||
@NonNull final SharpStream target,
|
||||
final boolean embedMetadata,
|
||||
@Nullable final StreamInfo streamInfo,
|
||||
@Nullable final Bitmap thumbnail) {
|
||||
if (!source.canRead() || !source.canRewind()) {
|
||||
throw new IllegalArgumentException("source stream must be readable and allows seeking");
|
||||
}
|
||||
@ -75,7 +147,9 @@ public class OggFromWebMWriter implements Closeable {
|
||||
|
||||
this.source = source;
|
||||
this.output = target;
|
||||
this.embedMetadata = embedMetadata;
|
||||
this.streamInfo = streamInfo;
|
||||
this.thumbnail = thumbnail;
|
||||
|
||||
this.streamId = (int) System.currentTimeMillis();
|
||||
|
||||
@ -188,18 +262,17 @@ public class OggFromWebMWriter implements Closeable {
|
||||
/* step 2: create packet with code init data */
|
||||
if (webmTrack.codecPrivate != null) {
|
||||
addPacketSegment(webmTrack.codecPrivate.length);
|
||||
makePacketheader(0x00, header, webmTrack.codecPrivate);
|
||||
makePacketHeader(0x00, header, webmTrack.codecPrivate);
|
||||
write(header);
|
||||
output.write(webmTrack.codecPrivate);
|
||||
}
|
||||
|
||||
/* step 3: create packet with metadata */
|
||||
final byte[] buffer = makeMetadata();
|
||||
if (buffer != null) {
|
||||
addPacketSegment(buffer.length);
|
||||
makePacketheader(0x00, header, buffer);
|
||||
write(header);
|
||||
output.write(buffer);
|
||||
if (embedMetadata) {
|
||||
final byte[] buffer = makeCommentHeader();
|
||||
if (buffer != null) {
|
||||
addPacketSegmentMultiPage(buffer, header);
|
||||
}
|
||||
}
|
||||
|
||||
/* step 4: calculate amount of packets */
|
||||
@ -236,7 +309,7 @@ public class OggFromWebMWriter implements Closeable {
|
||||
elapsedNs = Math.ceil(elapsedNs * resolution);
|
||||
|
||||
// create header and calculate page checksum
|
||||
int checksum = makePacketheader((long) elapsedNs, header, null);
|
||||
int checksum = makePacketHeader((long) elapsedNs, header, null);
|
||||
checksum = calcCrc32(checksum, page.array(), page.position());
|
||||
|
||||
header.putInt(HEADER_CHECKSUM_OFFSET, checksum);
|
||||
@ -249,7 +322,7 @@ public class OggFromWebMWriter implements Closeable {
|
||||
}
|
||||
}
|
||||
|
||||
private int makePacketheader(final long granPos, @NonNull final ByteBuffer buffer,
|
||||
private int makePacketHeader(final long granPos, @NonNull final ByteBuffer buffer,
|
||||
final byte[] immediatePage) {
|
||||
short length = HEADER_SIZE;
|
||||
|
||||
@ -282,40 +355,78 @@ public class OggFromWebMWriter implements Closeable {
|
||||
return checksumCrc32;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the metadata header for the selected codec (Opus or Vorbis).
|
||||
*
|
||||
* @see <a href="https://datatracker.ietf.org/doc/html/rfc7845.html#section-5.2">
|
||||
* RFC7845 5.2. Comment Header</a> for OPUS metadata header format
|
||||
* @see <a href="https://xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-610004.2">
|
||||
* Vorbis I 4.2. Header decode and decode setup</a> and
|
||||
* <a href="https://xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-820005">
|
||||
* Vorbis I 5. comment field and header specification</a>
|
||||
* for VORBIS metadata header format. Vorbis I 5. lists all the possible metadata tags.
|
||||
*
|
||||
* @return the metadata header as a byte array, or null if the codec is not supported
|
||||
* for metadata generation
|
||||
*/
|
||||
@Nullable
|
||||
private byte[] makeMetadata() {
|
||||
private byte[] makeCommentHeader() {
|
||||
if (DEBUG) {
|
||||
Log.d("OggFromWebMWriter", "Downloading media with codec ID " + webmTrack.codecId);
|
||||
Log.d(TAG, "Downloading media with codec ID " + webmTrack.codecId);
|
||||
}
|
||||
|
||||
final var metadata = new ArrayList<Pair<String, String>>();
|
||||
if (streamInfo != null) {
|
||||
final SongMetadata songMetadata = streamInfo.getSongMetadata();
|
||||
final StreamInfoMetadataHelper metadHelper = new StreamInfoMetadataHelper(streamInfo);
|
||||
// metadata that can be present in the stream info and the song metadata.
|
||||
// Use the song metadata if available, otherwise fallback to stream info.
|
||||
metadata.add(Pair.create("COMMENT", streamInfo.getUrl()));
|
||||
metadata.add(Pair.create("GENRE", metadHelper.getGenre()));
|
||||
metadata.add(Pair.create("ARTIST", metadHelper.getArtist()));
|
||||
metadata.add(Pair.create("TITLE", metadHelper.getTitle()));
|
||||
metadata.add(Pair.create("DATE", metadHelper.getReleaseDate()
|
||||
.getLocalDateTime()
|
||||
.format(DateTimeFormatter.ISO_DATE)));
|
||||
// Additional metadata that is only present in the song metadata
|
||||
if (songMetadata != null) {
|
||||
metadata.add(Pair.create("ALBUM", songMetadata.album));
|
||||
if (songMetadata.track != SongMetadata.TRACK_UNKNOWN) {
|
||||
// TRACKNUMBER is suggested in Vorbis spec,
|
||||
// but TRACK is more commonly used in practice
|
||||
metadata.add(Pair.create("TRACKNUMBER", String.valueOf(songMetadata.track)));
|
||||
metadata.add(Pair.create("TRACK", String.valueOf(songMetadata.track)));
|
||||
}
|
||||
metadata.add(Pair.create("PERFORMER", String.join(", ", songMetadata.performer)));
|
||||
metadata.add(Pair.create("ORGANIZATION", songMetadata.label));
|
||||
metadata.add(Pair.create("COPYRIGHT", songMetadata.copyright));
|
||||
}
|
||||
// Add thumbnail as cover art at the end because it is the largest metadata entry
|
||||
if (thumbnail != null) {
|
||||
metadata.add(makeFlacPictureTag(thumbnail));
|
||||
}
|
||||
}
|
||||
|
||||
if (DEBUG) {
|
||||
Log.d(TAG, "Creating metadata header with this data:");
|
||||
metadata.forEach(p -> Log.d(TAG, p.first + "=" + p.second));
|
||||
}
|
||||
|
||||
if ("A_OPUS".equals(webmTrack.codecId)) {
|
||||
final var metadata = new ArrayList<Pair<String, String>>();
|
||||
if (streamInfo != null) {
|
||||
metadata.add(Pair.create("COMMENT", streamInfo.getUrl()));
|
||||
metadata.add(Pair.create("GENRE", streamInfo.getCategory()));
|
||||
metadata.add(Pair.create("ARTIST", streamInfo.getUploaderName()));
|
||||
metadata.add(Pair.create("TITLE", streamInfo.getName()));
|
||||
metadata.add(Pair.create("DATE", streamInfo
|
||||
.getUploadDate()
|
||||
.getLocalDateTime()
|
||||
.format(DateTimeFormatter.ISO_DATE)));
|
||||
}
|
||||
|
||||
if (DEBUG) {
|
||||
Log.d("OggFromWebMWriter", "Creating metadata header with this data:");
|
||||
metadata.forEach(p -> {
|
||||
Log.d("OggFromWebMWriter", p.first + "=" + p.second);
|
||||
});
|
||||
}
|
||||
|
||||
return makeOpusTagsHeader(metadata);
|
||||
} else if ("A_VORBIS".equals(webmTrack.codecId)) {
|
||||
return new byte[]{
|
||||
0x03, // ¿¿¿???
|
||||
0x76, 0x6f, 0x72, 0x62, 0x69, 0x73, // "vorbis" binary string
|
||||
0x00, 0x00, 0x00, 0x00, // writing application string size (not present)
|
||||
0x00, 0x00, 0x00, 0x00 // additional tags count (zero means no tags)
|
||||
// See RFC7845 5.2: https://datatracker.ietf.org/doc/html/rfc7845.html#section-5.2
|
||||
final byte[] identificationHeader = new byte[]{
|
||||
0x4F, 0x70, 0x75, 0x73, 0x54, 0x61, 0x67, 0x73, // "OpusTags" binary string
|
||||
0x00, 0x00, 0x00, 0x00, // vendor (aka. Encoder) string of length 0
|
||||
};
|
||||
return makeCommentHeader(metadata, identificationHeader);
|
||||
} else if ("A_VORBIS".equals(webmTrack.codecId)) {
|
||||
// See https://xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-610004.2
|
||||
final byte[] identificationHeader = new byte[]{
|
||||
0x03, // packet type for Vorbis comment header
|
||||
0x76, 0x6f, 0x72, 0x62, 0x69, 0x73, // "vorbis" binary string
|
||||
0x00, 0x00, 0x00, 0x00, // vendor (aka. Encoder) string of length 0
|
||||
};
|
||||
return makeCommentHeader(metadata, identificationHeader);
|
||||
}
|
||||
|
||||
// not implemented for the desired codec
|
||||
@ -325,12 +436,12 @@ public class OggFromWebMWriter implements Closeable {
|
||||
/**
|
||||
* This creates a single metadata tag for use in opus metadata headers. It contains the four
|
||||
* byte string length field and includes the string as-is. This cannot be used independently,
|
||||
* but must follow a proper "OpusTags" header.
|
||||
* but must follow a proper Comment header.
|
||||
*
|
||||
* @param pair A key-value pair in the format "KEY=some value"
|
||||
* @return The binary data of the encoded metadata tag
|
||||
*/
|
||||
private static byte[] makeOpusMetadataTag(final Pair<String, String> pair) {
|
||||
private static byte[] makeVorbisMetadataTag(final Pair<String, String> pair) {
|
||||
final var keyValue = pair.first.toUpperCase() + "=" + pair.second.trim();
|
||||
|
||||
final var bytes = keyValue.getBytes();
|
||||
@ -342,21 +453,85 @@ public class OggFromWebMWriter implements Closeable {
|
||||
}
|
||||
|
||||
/**
|
||||
* This returns a complete "OpusTags" header, created from the provided metadata tags.
|
||||
* Generates a FLAC picture block for the provided bitmap.
|
||||
*
|
||||
* <p>
|
||||
* You probably want to use makeOpusMetadata(), which uses this function to create
|
||||
* a header with sensible metadata filled in.
|
||||
* The {@code METADATA_BLOCK_PICTURE} tag is defined in the FLAC specification (RFC 9639)
|
||||
* and is supported by Opus and Vorbis metadata headers.
|
||||
* The picture block contains the image data which is converted to JPEG
|
||||
* and associated metadata such as picture type, dimensions, and color depth.
|
||||
* The image data is Base64-encoded as per specification.
|
||||
* </p>
|
||||
*
|
||||
* @see <a href="https://www.rfc-editor.org/rfc/rfc9639.html#section-8.8">
|
||||
* RFC 9639 8.8 Picture</a>
|
||||
*
|
||||
* @param bitmap The bitmap to use for the picture block
|
||||
* @return The key-value pair representing the tag.
|
||||
* The key is {@code METADATA_BLOCK_PICTURE}
|
||||
* and the value is the Base64-encoded FLAC picture block.
|
||||
*/
|
||||
private static Pair<String, String> makeFlacPictureTag(final Bitmap bitmap) {
|
||||
// FLAC picture block format (big-endian):
|
||||
// uint32 picture_type
|
||||
// uint32 mime_length,
|
||||
// mime_string
|
||||
// uint32 desc_length,
|
||||
// desc_string
|
||||
// uint32 width
|
||||
// uint32 height
|
||||
// uint32 color_depth
|
||||
// uint32 colors_indexed
|
||||
// uint32 data_length,
|
||||
// data_bytes
|
||||
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, baos);
|
||||
|
||||
final byte[] imageData = baos.toByteArray();
|
||||
final byte[] mimeBytes = "image/jpeg".getBytes(StandardCharsets.UTF_8);
|
||||
final byte[] descBytes = new byte[0]; // optional description
|
||||
// fixed ints + mime + desc
|
||||
final int headerSize = 4 * 8 + mimeBytes.length + descBytes.length;
|
||||
final ByteBuffer buf = ByteBuffer.allocate(headerSize + imageData.length);
|
||||
// See https://www.rfc-editor.org/rfc/rfc9639.html#table-13 for the complete list
|
||||
// of picture types
|
||||
// TODO: allow specifying other picture types, i.e. cover (front) for music albums;
|
||||
// but this info needs to be provided by the extractor first.
|
||||
buf.putInt(3); // picture type: 0 = Other, 2 = Cover (front)
|
||||
buf.putInt(mimeBytes.length);
|
||||
buf.put(mimeBytes);
|
||||
buf.putInt(descBytes.length);
|
||||
// no description
|
||||
if (descBytes.length > 0) {
|
||||
buf.put(descBytes);
|
||||
}
|
||||
buf.putInt(bitmap.getWidth());
|
||||
buf.putInt(bitmap.getHeight());
|
||||
buf.putInt(24); // color depth for JPEG and PNG is usually 24 bits
|
||||
buf.putInt(0); // colors indexed (0 for non-indexed images like JPEG)
|
||||
buf.putInt(imageData.length);
|
||||
buf.put(imageData);
|
||||
final String b64 = Base64.getEncoder().encodeToString(buf.array());
|
||||
return Pair.create("METADATA_BLOCK_PICTURE", b64);
|
||||
}
|
||||
|
||||
/**
|
||||
* This returns a complete Comment header, created from the provided metadata tags.
|
||||
*
|
||||
* @param keyValueLines A list of pairs of the tags. This can also be though of as a mapping
|
||||
* from one key to multiple values.
|
||||
* @param identificationHeader the identification header for the codec,
|
||||
* which is required to be prefixed to the comment header.
|
||||
* @return The binary header
|
||||
*/
|
||||
private static byte[] makeOpusTagsHeader(final List<Pair<String, String>> keyValueLines) {
|
||||
private static byte[] makeCommentHeader(final List<Pair<String, String>> keyValueLines,
|
||||
final byte[] identificationHeader) {
|
||||
final var tags = keyValueLines
|
||||
.stream()
|
||||
.filter(p -> !p.second.isBlank())
|
||||
.map(OggFromWebMWriter::makeOpusMetadataTag)
|
||||
.collect(Collectors.toUnmodifiableList());
|
||||
.filter(p -> p.second != null && !p.second.isBlank())
|
||||
.map(OggFromWebMWriter::makeVorbisMetadataTag)
|
||||
.toList();
|
||||
|
||||
final var tagsBytes = tags.stream().collect(Collectors.summingInt(arr -> arr.length));
|
||||
|
||||
@ -365,10 +540,7 @@ public class OggFromWebMWriter implements Closeable {
|
||||
|
||||
final var head = ByteBuffer.allocate(byteCount);
|
||||
head.order(ByteOrder.LITTLE_ENDIAN);
|
||||
head.put(new byte[]{
|
||||
0x4F, 0x70, 0x75, 0x73, 0x54, 0x61, 0x67, 0x73, // "OpusTags" binary string
|
||||
0x00, 0x00, 0x00, 0x00, // vendor (aka. Encoder) string of length 0
|
||||
});
|
||||
head.put(identificationHeader);
|
||||
head.putInt(tags.size()); // 4 bytes for tag count
|
||||
tags.forEach(head::put); // dynamic amount of tag bytes
|
||||
|
||||
@ -448,17 +620,19 @@ public class OggFromWebMWriter implements Closeable {
|
||||
}
|
||||
|
||||
private boolean addPacketSegment(final int size) {
|
||||
if (size > 65025) {
|
||||
throw new UnsupportedOperationException("page size cannot be larger than 65025");
|
||||
if (size > OPUS_MAX_PACKETS_PAGE_SIZE) {
|
||||
throw new UnsupportedOperationException(String.format(
|
||||
"page size is %s but cannot be larger than %s",
|
||||
size, OPUS_MAX_PACKETS_PAGE_SIZE));
|
||||
}
|
||||
|
||||
int available = (segmentTable.length - segmentTableSize) * 255;
|
||||
final boolean extra = (size % 255) == 0;
|
||||
int available = (segmentTable.length - segmentTableSize) * OGG_SEGMENT_SIZE;
|
||||
final boolean extra = (size % OGG_SEGMENT_SIZE) == 0;
|
||||
|
||||
if (extra) {
|
||||
// add a zero byte entry in the table
|
||||
// required to indicate the sample size is multiple of 255
|
||||
available -= 255;
|
||||
// required to indicate the sample size is multiple of OGG_SEGMENT_SIZE
|
||||
available -= OGG_SEGMENT_SIZE;
|
||||
}
|
||||
|
||||
// check if possible add the segment, without overflow the table
|
||||
@ -466,8 +640,8 @@ public class OggFromWebMWriter implements Closeable {
|
||||
return false; // not enough space on the page
|
||||
}
|
||||
|
||||
for (int seg = size; seg > 0; seg -= 255) {
|
||||
segmentTable[segmentTableSize++] = (byte) Math.min(seg, 255);
|
||||
for (int seg = size; seg > 0; seg -= OGG_SEGMENT_SIZE) {
|
||||
segmentTable[segmentTableSize++] = (byte) Math.min(seg, OGG_SEGMENT_SIZE);
|
||||
}
|
||||
|
||||
if (extra) {
|
||||
@ -477,6 +651,102 @@ public class OggFromWebMWriter implements Closeable {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Like {@link #addPacketSegment(SimpleBlock)} for large metadata blobs
|
||||
* splits the provided data into multiple pages if necessary
|
||||
* and writes them immediately (header + data).
|
||||
* This method is intended to be used only for metadata (e.g. large thumbnails).
|
||||
*
|
||||
* @param data the metadata to add as a packet segment
|
||||
* @param header a reusable ByteBuffer for writing page headers; this method will write
|
||||
* the header for each page as needed
|
||||
*/
|
||||
private void addPacketSegmentMultiPage(@NonNull final byte[] data,
|
||||
@NonNull final ByteBuffer header) throws IOException {
|
||||
int offset = 0;
|
||||
boolean first = true;
|
||||
|
||||
while (offset < data.length) {
|
||||
final int remaining = data.length - offset;
|
||||
final boolean finalChunkCandidate = remaining <= OPUS_MAX_PACKETS_PAGE_SIZE;
|
||||
final int chunkSize;
|
||||
if (finalChunkCandidate) {
|
||||
chunkSize = remaining; // final chunk can be any size
|
||||
} else {
|
||||
// For intermediate (non-final) chunks, make the chunk size a multiple
|
||||
// of OGG_SEGMENT_SIZE so that the last lacing value is 255 and the
|
||||
// decoder won't treat the packet as finished on that page.
|
||||
final int maxFullSegments = OPUS_MAX_PACKETS_PAGE_SIZE / OGG_SEGMENT_SIZE;
|
||||
chunkSize = maxFullSegments * OGG_SEGMENT_SIZE;
|
||||
}
|
||||
|
||||
final boolean isFinalChunk = (offset + chunkSize) >= data.length;
|
||||
|
||||
// We must reserve appropriate number of lacing values in the segment table.
|
||||
// For chunks that are exact multiples of OGG_SEGMENT_SIZE and are the final
|
||||
// chunk of the packet, a trailing 0 lacing entry is required to indicate
|
||||
// the packet ends exactly on a segment boundary. For intermediate chunks
|
||||
// (continued across pages) we MUST NOT write that trailing 0 because then
|
||||
// the packet would appear complete on that page. Instead intermediate
|
||||
// chunks should end with only 255-valued lacing entries (no trailing 0).
|
||||
final int fullSegments = chunkSize / OGG_SEGMENT_SIZE; // may be 0
|
||||
final int lastSegSize = chunkSize % OGG_SEGMENT_SIZE; // 0..254
|
||||
final boolean chunkIsMultiple = (lastSegSize == 0);
|
||||
|
||||
int requiredEntries = fullSegments + (lastSegSize > 0 ? 1 : 0);
|
||||
if (chunkIsMultiple && isFinalChunk) {
|
||||
// need an extra zero entry to mark packet end
|
||||
requiredEntries += 1;
|
||||
}
|
||||
|
||||
// If the segment table doesn't have enough room, flush the current page
|
||||
// by writing a header without immediate data. This clears the segment table.
|
||||
if (requiredEntries > (segmentTable.length - segmentTableSize)) {
|
||||
// flush current page
|
||||
int checksum = makePacketHeader(0x00, header, null);
|
||||
checksum = calcCrc32(checksum, new byte[0], 0);
|
||||
header.putInt(HEADER_CHECKSUM_OFFSET, checksum);
|
||||
write(header);
|
||||
}
|
||||
|
||||
// After ensuring space, if still not enough (edge case), throw
|
||||
if (requiredEntries > (segmentTable.length - segmentTableSize)) {
|
||||
throw new IOException("Unable to reserve segment table entries for metadata chunk");
|
||||
}
|
||||
|
||||
// Fill the segment table entries for this chunk. For intermediate chunks
|
||||
// that are an exact multiple of OGG_SEGMENT_SIZE we must NOT append a
|
||||
// trailing zero entry (that would incorrectly signal packet end).
|
||||
final int remainingToAssign = chunkSize;
|
||||
for (int seg = remainingToAssign; seg > 0; seg -= OGG_SEGMENT_SIZE) {
|
||||
segmentTable[segmentTableSize++] = (byte) Math.min(seg, OGG_SEGMENT_SIZE);
|
||||
}
|
||||
|
||||
if (chunkIsMultiple && isFinalChunk) {
|
||||
// Only append the zero terminator for a final chunk that has an exact
|
||||
// multiple of OGG_SEGMENT_SIZE bytes.
|
||||
segmentTable[segmentTableSize++] = 0x00;
|
||||
}
|
||||
|
||||
// For continuation pages (after the first), mark the page as continued.
|
||||
if (!first) {
|
||||
packetFlag = FLAG_CONTINUED;
|
||||
}
|
||||
|
||||
final byte[] chunk = Arrays.copyOfRange(data, offset, offset + chunkSize);
|
||||
|
||||
// Now create header (which will consume and clear the segment table) and write
|
||||
// header + chunk data. makePacketHeader will compute checksum including chunk
|
||||
// when an immediatePage is provided.
|
||||
makePacketHeader(0x00, header, chunk);
|
||||
write(header);
|
||||
output.write(chunk);
|
||||
|
||||
offset += chunkSize;
|
||||
first = false;
|
||||
}
|
||||
}
|
||||
|
||||
private void populateCrc32Table() {
|
||||
for (int i = 0; i < 0x100; i++) {
|
||||
int crc = i << 24;
|
||||
|
||||
@ -0,0 +1,57 @@
|
||||
package org.schabi.newpipe.util
|
||||
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper
|
||||
import org.schabi.newpipe.extractor.stream.SongMetadata
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfo
|
||||
|
||||
/**
 * Convenience wrapper around a [StreamInfo] that prefers the dedicated [SongMetadata]
 * values (present e.g. for YT Music streams) and falls back to the generic stream
 * fields whenever the song metadata is missing or blank.
 */
class StreamInfoMetadataHelper(
    val streamInfo: StreamInfo
) {
    val songInfo: SongMetadata? = streamInfo.songMetadata

    fun getTitle(): String? {
        val songTitle = songInfo?.title
        if (songTitle?.contentEquals(streamInfo.name) == true) {
            // YT Music uses uppercase chars in the description, but the StreamInfo name is using
            // the correct case, so we prefer that
            return streamInfo.name
        }
        return if (songTitle?.isBlank() == false) songTitle else streamInfo.name
    }

    fun getArtist(): String? {
        val songArtist = songInfo?.artist
        return when {
            // YT Music uses uppercase chars in the description, but the uploader name is using
            // the correct case, so we prefer the uploader name
            songArtist?.contentEquals(streamInfo.uploaderName) == true -> streamInfo.uploaderName
            songArtist?.isBlank() == false -> songArtist
            else -> streamInfo.uploaderName
        }
    }

    fun getPerformer(): List<String?> = songInfo?.performer.orEmpty()

    fun getComposer(): String? = songInfo?.composer

    // Non-empty song genre wins; otherwise fall back to the stream category.
    fun getGenre(): String? = songInfo?.genre?.takeIf { it.isNotEmpty() } ?: streamInfo.category

    fun getAlbum(): String? = songInfo?.album

    // TRACK_UNKNOWN (and a missing songInfo) both map to null.
    fun getTrackNumber(): Int? = songInfo?.track?.takeIf { it != SongMetadata.TRACK_UNKNOWN }

    fun getDuration(): Long = songInfo?.duration?.seconds ?: streamInfo.duration

    // NOTE(review): declared non-null, but the fallback streamInfo.uploadDate is a Java
    // platform type that may be absent at runtime — confirm upstream guarantees.
    fun getReleaseDate(): DateWrapper = songInfo?.releaseDate ?: streamInfo.uploadDate

    fun getRecordLabel(): String? = songInfo?.label

    fun getCopyright(): String? = songInfo?.copyright ?: streamInfo.licence

    fun getLocation(): String? = songInfo?.location
}
|
||||
@ -186,7 +186,7 @@ object ImageStrategy {
|
||||
/**
 * Converts a thumbnail url stored in the database into an [Image] list.
 *
 * A `null` url means "no image known"; otherwise the stored url is the only
 * information available, so width/height/resolution are reported as unknown.
 *
 * @param url the url stored in the database, possibly null
 * @return an empty list for null, otherwise a single [Image] with unknown dimensions
 */
fun dbUrlToImageList(url: String?): List<Image> {
    // Bug fix: a stale merge left two `else ->` branches in this `when`; keep only the
    // branch using the named Image constants instead of the magic -1 values.
    return when (url) {
        null -> listOf()
        else -> listOf(Image(url, Image.HEIGHT_UNKNOWN, Image.WIDTH_UNKNOWN, ResolutionLevel.UNKNOWN))
    }
}
|
||||
}
|
||||
|
||||
@ -1,5 +1,7 @@
|
||||
package us.shandian.giga.get;
|
||||
|
||||
import android.content.Context;
|
||||
import android.graphics.Bitmap;
|
||||
import android.os.Handler;
|
||||
import android.system.ErrnoException;
|
||||
import android.system.OsConstants;
|
||||
@ -8,6 +10,7 @@ import android.util.Log;
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import org.schabi.newpipe.App;
|
||||
import org.schabi.newpipe.DownloaderImpl;
|
||||
|
||||
import java.io.File;
|
||||
@ -21,16 +24,24 @@ import java.net.SocketTimeoutException;
|
||||
import java.net.URL;
|
||||
import java.net.UnknownHostException;
|
||||
import java.nio.channels.ClosedByInterruptException;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import javax.net.ssl.SSLException;
|
||||
|
||||
import org.schabi.newpipe.extractor.Image;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfo;
|
||||
import org.schabi.newpipe.streams.io.StoredFileHelper;
|
||||
import org.schabi.newpipe.util.image.CoilHelper;
|
||||
import org.schabi.newpipe.util.image.ImageStrategy;
|
||||
import org.schabi.newpipe.util.image.PreferredImageQuality;
|
||||
|
||||
import us.shandian.giga.postprocessing.Postprocessing;
|
||||
import us.shandian.giga.service.DownloadManagerService;
|
||||
import us.shandian.giga.util.Utility;
|
||||
|
||||
import static org.schabi.newpipe.BuildConfig.DEBUG;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.YouTube;
|
||||
|
||||
public class DownloadMission extends Mission {
|
||||
private static final long serialVersionUID = 6L;// last bump: 07 october 2019
|
||||
@ -58,6 +69,10 @@ public class DownloadMission extends Mission {
|
||||
public static final int ERROR_HTTP_NO_CONTENT = 204;
|
||||
static final int ERROR_HTTP_FORBIDDEN = 403;
|
||||
|
||||
private StreamInfo streamInfo;
|
||||
protected transient volatile Bitmap thumbnail;
|
||||
protected volatile boolean thumbnailFetched = false;
|
||||
|
||||
/**
|
||||
* The urls of the file to download
|
||||
*/
|
||||
@ -153,7 +168,8 @@ public class DownloadMission extends Mission {
|
||||
public transient Thread[] threads = new Thread[0];
|
||||
public transient Thread init = null;
|
||||
|
||||
public DownloadMission(String[] urls, StoredFileHelper storage, char kind, Postprocessing psInstance) {
|
||||
public DownloadMission(String[] urls, StoredFileHelper storage, char kind,
|
||||
Postprocessing psInstance, StreamInfo streamInfo, Context context) {
|
||||
if (Objects.requireNonNull(urls).length < 1)
|
||||
throw new IllegalArgumentException("urls array is empty");
|
||||
this.urls = urls;
|
||||
@ -163,6 +179,7 @@ public class DownloadMission extends Mission {
|
||||
this.maxRetry = 3;
|
||||
this.storage = storage;
|
||||
this.psAlgorithm = psInstance;
|
||||
this.streamInfo = streamInfo;
|
||||
|
||||
if (DEBUG && psInstance == null && urls.length > 1) {
|
||||
Log.w(TAG, "mission created with multiple urls ¿missing post-processing algorithm?");
|
||||
@ -698,6 +715,7 @@ public class DownloadMission extends Mission {
|
||||
Exception exception = null;
|
||||
|
||||
try {
|
||||
psAlgorithm.setThumbnail(thumbnail);
|
||||
psAlgorithm.run(this);
|
||||
} catch (Exception err) {
|
||||
Log.e(TAG, "Post-processing failed. " + psAlgorithm.toString(), err);
|
||||
@ -829,6 +847,47 @@ public class DownloadMission extends Mission {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the thumbnail / cover art from a list of thumbnails.
|
||||
* The highest quality is selected.
|
||||
*
|
||||
* @param images the list of thumbnails
|
||||
*/
|
||||
public void fetchThumbnail(@NonNull final List<Image> images) {
|
||||
if (images.isEmpty()) {
|
||||
thumbnailFetched = true;
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Some containers have a limited size for embedded images / metadata.
|
||||
// To avoid problems, we download a medium quality image.
|
||||
// Alternative approaches are to either downscale a high res image or
|
||||
// to download the correct size depending on the chosen post-processing algorithm.
|
||||
final String thumbnailUrl = ImageStrategy.choosePreferredImage(
|
||||
images, PreferredImageQuality.HIGH);
|
||||
// TODO: get context from somewhere else
|
||||
Bitmap originalThumbnail = CoilHelper.INSTANCE.loadBitmapBlocking(
|
||||
App.getInstance(), thumbnailUrl);
|
||||
|
||||
// YouTube Music streams have non square thumbnails to fit the player aspect ratio
|
||||
// of 16:9. We can safely crop the thumbnail to a square because the squared thumbnail
|
||||
// is padded with bars on the sides.
|
||||
if (originalThumbnail != null && streamInfo.getService().equals(YouTube)
|
||||
&& streamInfo.getSongMetadata() != null // i.e. YT Music stream
|
||||
&& originalThumbnail.getWidth() > originalThumbnail.getHeight()) {
|
||||
int cropSize = Math.min(originalThumbnail.getWidth(), originalThumbnail.getHeight());
|
||||
int xOffset = (originalThumbnail.getWidth() - cropSize) / 2;
|
||||
originalThumbnail = Bitmap.createBitmap(originalThumbnail, xOffset, 0,
|
||||
cropSize, cropSize);
|
||||
}
|
||||
this.thumbnail = originalThumbnail;
|
||||
thumbnailFetched = true;
|
||||
} catch (final Exception e) {
|
||||
Log.w(TAG, "fetchThumbnail: failed to load thumbnail", e);
|
||||
thumbnailFetched = true;
|
||||
}
|
||||
}
|
||||
|
||||
static class HttpError extends Exception {
|
||||
final int statusCode;
|
||||
|
||||
@ -30,7 +30,8 @@ class M4aNoDash extends Postprocessing {
|
||||
|
||||
@Override
|
||||
int process(SharpStream out, SharpStream... sources) throws IOException {
|
||||
Mp4FromDashWriter muxer = new Mp4FromDashWriter(sources[0]);
|
||||
Mp4FromDashWriter muxer = new Mp4FromDashWriter(
|
||||
this.embedMetadata, this.streamInfo, this.thumbnail, sources[0]);
|
||||
muxer.setMainBrand(0x4D344120);// binary string "M4A "
|
||||
muxer.parseSources();
|
||||
muxer.selectTracks(0);
|
||||
|
||||
@ -0,0 +1,326 @@
|
||||
package us.shandian.giga.postprocessing;
|
||||
|
||||
import static java.time.ZoneOffset.UTC;
|
||||
|
||||
import android.graphics.Bitmap;
|
||||
|
||||
import org.schabi.newpipe.streams.io.SharpInputStream;
|
||||
import org.schabi.newpipe.streams.io.SharpStream;
|
||||
import org.schabi.newpipe.util.StreamInfoMetadataHelper;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.PushbackInputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
/**
|
||||
* Adds Metadata tp to an MP3 file by writing ID3v2.4 frames, i.e. metadata tags,
|
||||
* at the start of the file.
|
||||
* @see <a href="https://id3.org/id3v2.4.0-structure">ID3v2.4 specification</a>
|
||||
* @see <a href="https://id3.org/id3v2.4.0-frames">ID3v2.4 frames</a>
|
||||
*/
|
||||
public class Mp3Metadata extends Postprocessing {
|
||||
|
||||
Mp3Metadata() {
|
||||
super(true, true, ALGORITHM_MP3_METADATA);
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean test(SharpStream... sources) {
|
||||
return this.embedMetadata;
|
||||
}
|
||||
|
||||
@Override
|
||||
int process(SharpStream out, SharpStream... sources) throws IOException {
|
||||
if (sources == null || sources.length == 0 || sources[0] == null) {
|
||||
// nothing to do
|
||||
return OK_RESULT;
|
||||
}
|
||||
|
||||
// MP3 metadata is stored in ID3v2 tags at the start of the file,
|
||||
// so we need to build the tag in memory first and then write it
|
||||
// before copying the rest of the file.
|
||||
|
||||
final ByteArrayOutputStream frames = new ByteArrayOutputStream();
|
||||
final FrameWriter fw = new FrameWriter(frames);
|
||||
|
||||
makeMetadata(fw);
|
||||
makePictureFrame(fw);
|
||||
|
||||
byte[] framesBytes = frames.toByteArray();
|
||||
|
||||
|
||||
// ID3 header: 'ID3' + ver(0x04,0x00) + flags(0) + size (synchsafe 4 bytes)
|
||||
final ByteArrayOutputStream tag = new ByteArrayOutputStream();
|
||||
tag.write(new byte[]{'I', 'D', '3'});
|
||||
tag.write(0x04); // version 2.4
|
||||
tag.write(0x00); // revision
|
||||
tag.write(0x00); // flags
|
||||
int tagSize = framesBytes.length; // size excluding 10-byte header
|
||||
tag.write(toSynchsafe(tagSize));
|
||||
tag.write(framesBytes);
|
||||
|
||||
|
||||
byte[] tagBytes = tag.toByteArray();
|
||||
out.write(tagBytes);
|
||||
try (InputStream sIn = new SharpInputStream(sources[0])) {
|
||||
copyStreamSkippingId3(sIn, out);
|
||||
}
|
||||
out.flush();
|
||||
|
||||
return OK_RESULT;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Write metadata frames based on the StreamInfo's metadata.
|
||||
* @see <a href="https://id3.org/id3v2.4.0-frames">ID3v2.4 frames</a> for a list of frame types
|
||||
* and their identifiers.
|
||||
* @param fw the FrameWriter to write frames to
|
||||
* @throws IOException if an I/O error occurs while writing frames
|
||||
*/
|
||||
private void makeMetadata(FrameWriter fw) throws IOException {
|
||||
var metadata = new StreamInfoMetadataHelper(this.streamInfo);
|
||||
|
||||
fw.writeTextFrame("TIT2", metadata.getTitle());
|
||||
fw.writeTextFrame("TPE1", metadata.getArtist());
|
||||
fw.writeTextFrame("TCOM", metadata.getComposer());
|
||||
fw.writeTextFrame("TIPL", metadata.getPerformer());
|
||||
fw.writeTextFrame("TCON", metadata.getGenre());
|
||||
fw.writeTextFrame("TALB", metadata.getAlbum());
|
||||
|
||||
final LocalDateTime releaseDate = metadata.getReleaseDate().getLocalDateTime(UTC);
|
||||
// determine precision by checking that lower-order fields are at their "zero"/start values
|
||||
boolean isOnlyMonth = releaseDate.getDayOfMonth() == 1
|
||||
&& releaseDate.getHour() == 0
|
||||
&& releaseDate.getMinute() == 0
|
||||
&& releaseDate.getSecond() == 0
|
||||
&& releaseDate.getNano() == 0;
|
||||
boolean isOnlyYear = releaseDate.getMonthValue() == 1
|
||||
&& isOnlyMonth;
|
||||
// see https://id3.org/id3v2.4.0-structure > 4. ID3v2 frame overview
|
||||
// for date formats in TDRC frame
|
||||
final String datePattern;
|
||||
if (isOnlyYear) {
|
||||
datePattern = "yyyy";
|
||||
} else if (isOnlyMonth) {
|
||||
datePattern = "yyyy-MM";
|
||||
} else {
|
||||
datePattern = "yyyy-MM-dd";
|
||||
}
|
||||
fw.writeTextFrame("TDRC",
|
||||
releaseDate.format(DateTimeFormatter.ofPattern(datePattern)));
|
||||
|
||||
|
||||
if (metadata.getTrackNumber() != null) {
|
||||
fw.writeTextFrame("TRCK", String.valueOf(metadata.getTrackNumber()));
|
||||
}
|
||||
|
||||
fw.writeTextFrame("TPUB", metadata.getRecordLabel());
|
||||
fw.writeTextFrame("TCOP", metadata.getCopyright());
|
||||
|
||||
// WXXX is a user defined URL link frame, we can use it to store the URL of the stream
|
||||
// However, since it's user defined, so not all players support it.
|
||||
// Using the comment frame (COMM) as fallback
|
||||
fw.writeTextFrame("WXXX", streamInfo.getUrl());
|
||||
fw.writeCommentFrame("eng", streamInfo.getUrl());
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a picture frame (APIC) with the thumbnail image if available.
|
||||
* @param fw the FrameWriter to write the picture frame to
|
||||
* @throws IOException if an I/O error occurs while writing the frame
|
||||
*/
|
||||
private void makePictureFrame(FrameWriter fw) throws IOException {
|
||||
if (thumbnail != null) {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
thumbnail.compress(Bitmap.CompressFormat.PNG, 100, baos);
|
||||
final byte[] imgBytes = baos.toByteArray();
|
||||
baos.close();
|
||||
fw.writePictureFrame("image/png", imgBytes);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy the input stream to the output stream, but if the input stream starts with an ID3v2 tag,
|
||||
* skip the tag and only copy the audio data.
|
||||
* @param in the input stream to read from (should be at the start of the MP3 file)
|
||||
* @param out the output stream to write to
|
||||
* @throws IOException if an I/O error occurs while reading or writing
|
||||
*/
|
||||
private static void copyStreamSkippingId3(InputStream in, SharpStream out) throws IOException {
|
||||
PushbackInputStream pin = (in instanceof PushbackInputStream) ? (PushbackInputStream) in : new PushbackInputStream(in, 10);
|
||||
byte[] header = new byte[10];
|
||||
int hr = pin.read(header);
|
||||
if (hr == 10 && header[0] == 'I' && header[1] == 'D' && header[2] == '3') {
|
||||
// bytes 3 and 4 are version and revision and byte 5 is flags
|
||||
// the size is stored as synchsafe at bytes 6..9
|
||||
int size = fromSynchsafe(header, 6);
|
||||
long remaining = size;
|
||||
// consume exactly 'size' bytes, i.e. the rest of the metadata frames, from the stream
|
||||
byte[] skipBuf = new byte[8192];
|
||||
while (remaining > 0) {
|
||||
int toRead = (int) Math.min(skipBuf.length, remaining);
|
||||
int r = pin.read(skipBuf, 0, toRead);
|
||||
if (r <= 0) break;
|
||||
remaining -= r;
|
||||
}
|
||||
} else {
|
||||
// push header bytes back so copy will include them
|
||||
if (hr > 0) pin.unread(header, 0, hr);
|
||||
}
|
||||
|
||||
// copy rest
|
||||
byte[] buf = new byte[8192];
|
||||
int r;
|
||||
while ((r = pin.read(buf)) > 0) out.write(buf, 0, r);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a 4-byte synchsafe integer from a regular integer value.
|
||||
* @see <a href="https://id3.org/id3v2.4.0-structure">ID3v2.4 specification</a> section
|
||||
* <i>6.2. Synchsafe integers</i>
|
||||
* @param value the integer value to convert (should be non-negative and less than 2^28)
|
||||
* @return the synchsafe byte array
|
||||
*/
|
||||
private static byte[] toSynchsafe(int value) {
|
||||
byte[] b = new byte[4];
|
||||
b[0] = (byte) ((value >> 21) & 0x7F);
|
||||
b[1] = (byte) ((value >> 14) & 0x7F);
|
||||
b[2] = (byte) ((value >> 7) & 0x7F);
|
||||
b[3] = (byte) (value & 0x7F);
|
||||
return b;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a regular integer from a 4-byte synchsafe byte array.
|
||||
* @see <a href="https://id3.org/id3v2.4.0-structure">ID3v2.4 specification</a> section
|
||||
* <i>6.2. Synchsafe integers</i>
|
||||
* @param b the byte array containing the synchsafe integer
|
||||
* (should be at least 4 bytes + offset long)
|
||||
* @param offset the offset in the byte array where the synchsafe integer starts
|
||||
* @return the regular integer value
|
||||
*/
|
||||
private static int fromSynchsafe(byte[] b, int offset) {
|
||||
return ((b[offset] & 0x7F) << 21)
|
||||
| ((b[offset + 1] & 0x7F) << 14)
|
||||
| ((b[offset + 2] & 0x7F) << 7)
|
||||
| (b[offset + 3] & 0x7F);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Helper class to write ID3v2.4 frames to a ByteArrayOutputStream.
|
||||
*/
|
||||
private static class FrameWriter {
|
||||
|
||||
/**
|
||||
* This separator is used to separate multiple entries in a list of an ID3v2 text frame.
|
||||
* @see <a href="https://id3.org/id3v2.4.0-frames">ID3v2.4 frames</a> section
|
||||
* <i>4.2. Text information frames</i>
|
||||
*/
|
||||
private static final Character TEXT_LIST_SEPARATOR = 0x00;
|
||||
private static final byte UTF8_ENCODING_BYTE = 0x03;
|
||||
|
||||
private final ByteArrayOutputStream out;
|
||||
|
||||
FrameWriter(ByteArrayOutputStream out) {
|
||||
this.out = out;
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a text frame with the given identifier and text content.
|
||||
* @param id the 4 character long frame identifier
|
||||
* @param text the text content to write. If null or blank, no frame is written.
|
||||
* @throws IOException if an I/O error occurs while writing the frame
|
||||
*/
|
||||
void writeTextFrame(String id, String text) throws IOException {
|
||||
if (text == null || text.isBlank()) return;
|
||||
byte[] data = text.getBytes(StandardCharsets.UTF_8);
|
||||
ByteArrayOutputStream frame = new ByteArrayOutputStream();
|
||||
frame.write(UTF8_ENCODING_BYTE);
|
||||
frame.write(data);
|
||||
writeFrame(id, frame.toByteArray());
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a text frame that can contain multiple entries separated by the
|
||||
* {@link #TEXT_LIST_SEPARATOR}.
|
||||
* @param id the 4 character long frame identifier
|
||||
* @param texts the list of text entries to write. If null or empty, no frame is written.
|
||||
* Blank or null entries are skipped.
|
||||
* @throws IOException if an I/O error occurs while writing the frame
|
||||
*/
|
||||
void writeTextFrame(String id, List<String> texts) throws IOException {
|
||||
if (texts == null || texts.isEmpty()) return;
|
||||
ByteArrayOutputStream frame = new ByteArrayOutputStream();
|
||||
frame.write(UTF8_ENCODING_BYTE);
|
||||
for (int i = 0; i < texts.size(); i++) {
|
||||
String text = texts.get(i);
|
||||
if (text != null && !text.isBlank()) {
|
||||
byte[] data = text.getBytes(StandardCharsets.UTF_8);
|
||||
frame.write(data);
|
||||
if (i < texts.size() - 1) {
|
||||
frame.write(TEXT_LIST_SEPARATOR);
|
||||
}
|
||||
}
|
||||
}
|
||||
writeFrame(id, frame.toByteArray());
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a picture frame (APIC) with the given MIME type and image data.
|
||||
* @see <a href="https://id3.org/id3v2.4.0-frames">ID3v2.4 frames</a> section
|
||||
* <i>4.14. Attached picture</i>
|
||||
* @param mimeType the MIME type of the image (e.g. "image/png" or "image/jpeg").
|
||||
* @param imageData the binary data of the image. If empty, no frame is written.
|
||||
* @throws IOException
|
||||
*/
|
||||
void writePictureFrame(@Nonnull String mimeType, @Nonnull byte[] imageData)
|
||||
throws IOException {
|
||||
if (imageData.length == 0) return;
|
||||
ByteArrayOutputStream frame = new ByteArrayOutputStream();
|
||||
frame.write(UTF8_ENCODING_BYTE);
|
||||
frame.write(mimeType.getBytes(StandardCharsets.US_ASCII));
|
||||
frame.write(0x00);
|
||||
frame.write(0x03); // picture type: 3 = cover(front)
|
||||
frame.write(0x00); // empty description terminator (UTF-8 empty string)
|
||||
// Then the picture bytes
|
||||
frame.write(imageData);
|
||||
writeFrame("APIC", frame.toByteArray());
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a comment frame (COMM) with the given language and comment text.
|
||||
* @param lang a 3-character ISO-639-2 language code (e.g. "eng" for English).
|
||||
* If null or invalid, defaults to "eng".
|
||||
* @param comment the comment text to write. If null, no frame is written.
|
||||
* @throws IOException
|
||||
*/
|
||||
void writeCommentFrame(String lang, String comment) throws IOException {
|
||||
if (comment == null) return;
|
||||
if (lang == null || lang.length() != 3) lang = "eng";
|
||||
ByteArrayOutputStream frame = new ByteArrayOutputStream();
|
||||
frame.write(UTF8_ENCODING_BYTE);
|
||||
frame.write(lang.getBytes(StandardCharsets.US_ASCII));
|
||||
frame.write(0x00); // short content descriptor (empty) terminator
|
||||
frame.write(comment.getBytes(StandardCharsets.UTF_8));
|
||||
writeFrame("COMM", frame.toByteArray());
|
||||
}
|
||||
|
||||
private void writeFrame(String id, byte[] data) throws IOException {
|
||||
if (data == null || data.length == 0) return;
|
||||
// frame header: id(4) size(4 synchsafe) flags(2)
|
||||
out.write(id.getBytes(StandardCharsets.US_ASCII));
|
||||
out.write(toSynchsafe(data.length));
|
||||
out.write(new byte[]{0x00, 0x00});
|
||||
out.write(data);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -16,7 +16,8 @@ class Mp4FromDashMuxer extends Postprocessing {
|
||||
|
||||
@Override
|
||||
int process(SharpStream out, SharpStream... sources) throws IOException {
|
||||
Mp4FromDashWriter muxer = new Mp4FromDashWriter(sources);
|
||||
Mp4FromDashWriter muxer = new Mp4FromDashWriter(
|
||||
this.embedMetadata, this.streamInfo, this.thumbnail, sources);
|
||||
muxer.parseSources();
|
||||
muxer.selectTracks(0, 0);
|
||||
muxer.build(out);
|
||||
|
||||
@ -0,0 +1,476 @@
|
||||
package us.shandian.giga.postprocessing;
|
||||
|
||||
import org.schabi.newpipe.streams.io.SharpStream;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* Postprocessing algorithm to insert metadata into an existing MP4 file
|
||||
* by modifying/adding the 'udta' box inside 'moov'.
|
||||
*
|
||||
* @see <a href="https://atomicparsley.sourceforge.net/mpeg-4files.html">
|
||||
* https://atomicparsley.sourceforge.net/mpeg-4files.html</a> for a quick summary on
|
||||
* the MP4 file format and its specification.
|
||||
* @see <a href="https://developer.apple.com/documentation/quicktime-file-format/">
|
||||
* Apple Quick Time Format Specification</a> which is the basis for MP4 file format
|
||||
* and contains detailed information about the structure of MP4 files.
|
||||
 * @see <a href="https://developer.apple.com/documentation/quicktime-file-format/user_data_atoms">
 *     Apple Quick Time Format Specification for user data atoms (udta)</a>
|
||||
*/
|
||||
public class Mp4Metadata extends Postprocessing {
|
||||
|
||||
// Package-private: instances are created by the Postprocessing factory for
// ALGORITHM_MP4_METADATA.
// NOTE(review): Mp3Metadata passes super(true, true, ...) while this passes
// (false, true, ...) — confirm the meaning of the two booleans against the
// Postprocessing constructor before changing either.
Mp4Metadata() {
    super(false, true, ALGORITHM_MP4_METADATA);
}
|
||||
|
||||
@Override
|
||||
boolean test(SharpStream... sources) throws IOException {
|
||||
// nothing to do if metadata should not be embedded
|
||||
if (!embedMetadata) return false;
|
||||
|
||||
// quick check: ensure there's at least one source and it looks like an MP4,
|
||||
// i.e. the file has a 'moov' box near the beginning.
|
||||
// THe 'udta' box is inserted inside 'moov', so if there's no 'moov' we can't do anything.
|
||||
if (sources == null || sources.length == 0 || sources[0] == null) return false;
|
||||
|
||||
final SharpStream src = sources[0];
|
||||
try {
|
||||
src.rewind();
|
||||
|
||||
// scan first few boxes until we find moov or reach a reasonable limit
|
||||
final int MAX_SCAN = 8 * 1024 * 1024; // 8 MiB
|
||||
int scanned = 0;
|
||||
|
||||
while (scanned < MAX_SCAN) {
|
||||
// read header
|
||||
byte[] header = new byte[8];
|
||||
int r = readFully(src, header, 0, 8);
|
||||
if (r < 8) break;
|
||||
|
||||
final int boxSize = ByteBuffer.wrap(header, 0, 4).getInt();
|
||||
final int boxType = ByteBuffer.wrap(header, 4, 4).getInt();
|
||||
|
||||
if (boxType == 0x6D6F6F76) { // "moov"
|
||||
return true;
|
||||
}
|
||||
|
||||
long skip = (boxSize > 8) ? (boxSize - 8) : 0;
|
||||
// boxSize == 0 means extends to EOF -> stop scanning
|
||||
if (boxSize == 0) break;
|
||||
|
||||
// attempt skip
|
||||
long skipped = src.skip(skip);
|
||||
if (skipped < skip) break;
|
||||
|
||||
scanned += 8 + (int) skip;
|
||||
}
|
||||
|
||||
return false;
|
||||
} finally {
|
||||
// best-effort rewind; ignore problems here
|
||||
try {
|
||||
src.rewind();
|
||||
} catch (IOException ignored) {
|
||||
// nothing to do
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
int process(SharpStream out, SharpStream... sources) throws IOException {
|
||||
if (sources == null || sources.length == 0) return OK_RESULT;
|
||||
|
||||
final SharpStream src = sources[0];
|
||||
src.rewind();
|
||||
|
||||
// helper buffer for copy
|
||||
final byte[] buf = new byte[64 * 1024];
|
||||
|
||||
// copy until moov
|
||||
while (true) {
|
||||
// read header
|
||||
byte[] header = new byte[8];
|
||||
int h = readFully(src, header, 0, 8);
|
||||
if (h < 8) {
|
||||
// no more data, nothing to do
|
||||
return OK_RESULT;
|
||||
}
|
||||
|
||||
final int boxSize = ByteBuffer.wrap(header, 0, 4).getInt();
|
||||
final int boxType = ByteBuffer.wrap(header, 4, 4).getInt();
|
||||
|
||||
if (boxType != 0x6D6F6F76) { // not "moov" -> copy whole box
|
||||
// write header
|
||||
out.write(header);
|
||||
|
||||
long remaining = (boxSize > 8) ? (boxSize - 8) : 0;
|
||||
if (boxSize == 0) {
|
||||
// box extends to EOF: copy rest and return
|
||||
int r;
|
||||
while ((r = src.read(buf)) > 0) {
|
||||
out.write(buf, 0, r);
|
||||
}
|
||||
return OK_RESULT;
|
||||
}
|
||||
|
||||
while (remaining > 0) {
|
||||
int read = src.read(buf, 0, (int) Math.min(buf.length, remaining));
|
||||
if (read <= 0) break;
|
||||
out.write(buf, 0, read);
|
||||
remaining -= read;
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// found moov. read full moov box into memory
|
||||
long moovSize = boxSize;
|
||||
boolean hasLargeSize = false;
|
||||
if (moovSize == 1) {
|
||||
// extended size: read 8 bytes
|
||||
byte[] ext = new byte[8];
|
||||
readFully(src, ext, 0, 8);
|
||||
moovSize = ByteBuffer.wrap(ext).getLong();
|
||||
hasLargeSize = true;
|
||||
}
|
||||
|
||||
if (moovSize < 8) {
|
||||
// malformed
|
||||
return OK_RESULT;
|
||||
}
|
||||
|
||||
final int toRead = (int) (moovSize - (hasLargeSize ? 16 : 8));
|
||||
final byte[] moovPayload = new byte[toRead];
|
||||
readFully(src, moovPayload, 0, toRead);
|
||||
|
||||
// search for udta inside moov
|
||||
int udtaIndex = indexOfBox(moovPayload, 0x75647461); // "udta"
|
||||
|
||||
if (udtaIndex < 0) {
|
||||
// no udta: build udta using helper and insert before first 'trak' atom
|
||||
byte[] udtaBytes = buildUdta();
|
||||
|
||||
int insertPos = indexOfBox(moovPayload, 0x7472616B); // "trak"
|
||||
if (insertPos < 0) insertPos = moovPayload.length;
|
||||
|
||||
byte[] newPayload = new byte[moovPayload.length + udtaBytes.length];
|
||||
System.arraycopy(moovPayload, 0, newPayload, 0, insertPos);
|
||||
System.arraycopy(udtaBytes, 0, newPayload, insertPos, udtaBytes.length);
|
||||
System.arraycopy(moovPayload, insertPos, newPayload, insertPos + udtaBytes.length,
|
||||
moovPayload.length - insertPos);
|
||||
|
||||
long newMoovSize = moovSize + udtaBytes.length;
|
||||
long delta = newMoovSize - moovSize;
|
||||
|
||||
// adjust chunk offsets in the new payload so stco/co64 entries point to correct mdat offsets
|
||||
adjustChunkOffsetsRecursive(newPayload, 0, newPayload.length, delta);
|
||||
|
||||
// write updated moov header
|
||||
if (hasLargeSize) {
|
||||
out.write(intToBytes(1));
|
||||
out.write(intToBytes(0x6D6F6F76)); // "moov"
|
||||
out.write(longToBytes(newMoovSize));
|
||||
} else {
|
||||
out.write(intToBytes((int) newMoovSize));
|
||||
out.write(intToBytes(0x6D6F6F76)); // "moov"
|
||||
}
|
||||
|
||||
out.write(newPayload);
|
||||
|
||||
} else {
|
||||
// udta exists: replace the existing udta box with newly built udta
|
||||
// determine old udta size (support extended size and size==0 -> till end of moov)
|
||||
if (udtaIndex + 8 > moovPayload.length) {
|
||||
// malformed; just write original and continue
|
||||
if (hasLargeSize) {
|
||||
out.write(intToBytes(1));
|
||||
out.write(intToBytes(0x6D6F6F76)); // "moov"
|
||||
out.write(longToBytes(moovSize));
|
||||
} else {
|
||||
out.write(intToBytes((int) moovSize));
|
||||
out.write(intToBytes(0x6D6F6F76)); // "moov"
|
||||
}
|
||||
out.write(moovPayload);
|
||||
} else {
|
||||
int sizeField = readUInt32(moovPayload, udtaIndex);
|
||||
long oldUdtaSize;
|
||||
if (sizeField == 1) {
|
||||
// extended
|
||||
if (udtaIndex + 16 > moovPayload.length) {
|
||||
oldUdtaSize = ((long) moovPayload.length) - udtaIndex; // fallback
|
||||
} else {
|
||||
oldUdtaSize = readUInt64(moovPayload, udtaIndex + 8);
|
||||
}
|
||||
} else if (sizeField == 0) {
|
||||
// until end of file/moov
|
||||
oldUdtaSize = ((long) moovPayload.length) - udtaIndex;
|
||||
} else {
|
||||
oldUdtaSize = sizeField & 0xFFFFFFFFL;
|
||||
}
|
||||
|
||||
// compute the integer length (bounded by remaining payload)
|
||||
int oldUdtaIntLen = (int) Math.min(oldUdtaSize, (moovPayload.length - udtaIndex));
|
||||
|
||||
// build new udta
|
||||
byte[] newUdta = buildUdta();
|
||||
|
||||
// If new udta fits into old udta area, overwrite in place and keep moov size unchanged
|
||||
if (newUdta.length <= oldUdtaIntLen) {
|
||||
byte[] newPayload = new byte[moovPayload.length];
|
||||
// copy prefix
|
||||
System.arraycopy(moovPayload, 0, newPayload, 0, udtaIndex);
|
||||
// copy new udta
|
||||
System.arraycopy(newUdta, 0, newPayload, udtaIndex, newUdta.length);
|
||||
// pad remaining old udta space with zeros
|
||||
int padStart = udtaIndex + newUdta.length;
|
||||
int padLen = oldUdtaIntLen - newUdta.length;
|
||||
if (padLen > 0) {
|
||||
Arrays.fill(newPayload, padStart, padStart + padLen, (byte) 0);
|
||||
}
|
||||
// copy suffix
|
||||
int suffixStart = udtaIndex + oldUdtaIntLen;
|
||||
System.arraycopy(moovPayload, suffixStart, newPayload, udtaIndex + oldUdtaIntLen,
|
||||
moovPayload.length - suffixStart);
|
||||
|
||||
// moovSize unchanged
|
||||
if (hasLargeSize) {
|
||||
out.write(intToBytes(1));
|
||||
out.write(intToBytes(0x6D6F6F76));
|
||||
out.write(longToBytes(moovSize));
|
||||
} else {
|
||||
out.write(intToBytes((int) moovSize));
|
||||
out.write(intToBytes(0x6D6F6F76));
|
||||
}
|
||||
out.write(newPayload);
|
||||
|
||||
} else {
|
||||
// construct new moov payload by replacing the old udta region (previous behavior)
|
||||
int newPayloadLen = moovPayload.length - oldUdtaIntLen + newUdta.length;
|
||||
byte[] newPayload = new byte[newPayloadLen];
|
||||
|
||||
// copy prefix
|
||||
System.arraycopy(moovPayload, 0, newPayload, 0, udtaIndex);
|
||||
// copy new udta
|
||||
System.arraycopy(newUdta, 0, newPayload, udtaIndex, newUdta.length);
|
||||
// copy suffix
|
||||
int suffixStart = udtaIndex + oldUdtaIntLen;
|
||||
System.arraycopy(moovPayload, suffixStart, newPayload, udtaIndex + newUdta.length,
|
||||
moovPayload.length - suffixStart);
|
||||
|
||||
long newMoovSize = moovSize - oldUdtaSize + newUdta.length;
|
||||
long delta = newMoovSize - moovSize;
|
||||
|
||||
// adjust chunk offsets in the new payload so stco/co64 entries point to correct mdat offsets
|
||||
adjustChunkOffsetsRecursive(newPayload, 0, newPayload.length, delta);
|
||||
|
||||
// write updated moov header
|
||||
if (hasLargeSize) {
|
||||
out.write(intToBytes(1));
|
||||
out.write(intToBytes(0x6D6F6F76)); // "moov"
|
||||
out.write(longToBytes(newMoovSize));
|
||||
} else {
|
||||
out.write(intToBytes((int) newMoovSize));
|
||||
out.write(intToBytes(0x6D6F6F76)); // "moov"
|
||||
}
|
||||
|
||||
out.write(newPayload);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// copy rest of file
|
||||
int r;
|
||||
while ((r = src.read(buf)) > 0) {
|
||||
out.write(buf, 0, r);
|
||||
}
|
||||
|
||||
return OK_RESULT;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Recursively walks the box ("atom") structure in {@code payload} and adds
 * {@code delta} to every chunk-offset entry found in 'stco' (32-bit) and
 * 'co64' (64-bit) tables. This is required after the moov box changes size,
 * because chunk offsets are absolute file positions into the mdat data that
 * follows.
 *
 * @param payload buffer containing a sequence of ISO-BMFF boxes
 * @param start   offset of the first box inside {@code payload}
 * @param length  number of bytes belonging to the parent box content
 * @param delta   signed amount to add to every chunk offset
 * @throws IOException if an adjusted 'stco' entry no longer fits in 32 bits
 */
private void adjustChunkOffsetsRecursive(byte[] payload, int start,
                                         int length, long delta) throws IOException {
    int idx = start;
    final int end = start + length;
    while (idx + 8 <= end) {
        int boxSize = readUInt32(payload, idx);
        int boxType = readUInt32(payload, idx + 4);

        if (boxSize == 0) {
            // size 0 means the box extends to the end of the parent
            boxSize = end - idx;
        } else if (boxSize < 0) {
            // size field above Integer.MAX_VALUE reads as negative: treat as malformed
            break;
        }

        int headerLen = 8;
        long declaredSize = ((long) boxSize) & 0xFFFFFFFFL;
        if (boxSize == 1) {
            // extended size: a 64-bit length follows the 8-byte header
            if (idx + 16 > end) break;
            declaredSize = readUInt64(payload, idx + 8);
            headerLen = 16;
        }

        int contentStart = idx + headerLen;
        int contentLen = (int) (declaredSize - headerLen);
        if (contentLen < 0 || contentStart + contentLen > end) {
            // declared size is inconsistent with the parent bounds, stop
            break;
        }

        if (boxType == 0x7374636F) { // 'stco'
            // layout: version/flags(4) entry_count(4) then 4-byte entries
            int entryCountOff = contentStart + 4;
            // NOTE(review): `return` here (and in the co64 branch) aborts the
            // entire traversal on a truncated table, unlike the `break`s above
            // which only stop the current nesting level — confirm intended
            if (entryCountOff + 4 > end) return;
            int count = readUInt32(payload, entryCountOff);
            int entriesStart = entryCountOff + 4;
            for (int i = 0; i < count; i++) {
                int entryOff = entriesStart + i * 4;
                if (entryOff + 4 > end) break;
                long val = ((long) readUInt32(payload, entryOff)) & 0xFFFFFFFFL;
                long newVal = val + delta;
                if (newVal < 0 || newVal > 0xFFFFFFFFL) {
                    // shifted offset cannot be represented in a 32-bit stco entry
                    throw new IOException("stco entry overflow after applying delta");
                }
                putUInt32(payload, entryOff, (int) newVal);
            }
        } else if (boxType == 0x636F3634) { // 'co64'
            // same layout as stco but with 64-bit entries, so no overflow check
            int entryCountOff = contentStart + 4;
            if (entryCountOff + 4 > end) return;
            int count = readUInt32(payload, entryCountOff);
            int entriesStart = entryCountOff + 4;
            for (int i = 0; i < count; i++) {
                int entryOff = entriesStart + i * 8;
                if (entryOff + 8 > end) break;
                long val = readUInt64(payload, entryOff);
                long newVal = val + delta;
                putUInt64(payload, entryOff, newVal);
            }
        } else {
            // recurse into container boxes; any box with >= 8 bytes of content
            // is treated as a potential container. Leaf payloads that happen to
            // look like boxes are bounded by the contentLen checks above.
            if (contentLen >= 8) {
                adjustChunkOffsetsRecursive(payload, contentStart, contentLen, delta);
            }
        }

        idx += (int) declaredSize;
    }
}
|
||||
|
||||
private static int readUInt32(byte[] buf, int off) {
|
||||
return ((buf[off] & 0xFF) << 24) | ((buf[off + 1] & 0xFF) << 16)
|
||||
| ((buf[off + 2] & 0xFF) << 8) | (buf[off + 3] & 0xFF);
|
||||
}
|
||||
|
||||
private static long readUInt64(byte[] buf, int off) {
|
||||
return ((long) readUInt32(buf, off) << 32) | ((long) readUInt32(buf, off + 4) & 0xFFFFFFFFL);
|
||||
}
|
||||
|
||||
private static void putUInt32(byte[] buf, int off, int v) {
|
||||
buf[off] = (byte) ((v >>> 24) & 0xFF);
|
||||
buf[off + 1] = (byte) ((v >>> 16) & 0xFF);
|
||||
buf[off + 2] = (byte) ((v >>> 8) & 0xFF);
|
||||
buf[off + 3] = (byte) (v & 0xFF);
|
||||
}
|
||||
|
||||
private static void putUInt64(byte[] buf, int off, long v) {
|
||||
putUInt32(buf, off, (int) ((v >>> 32) & 0xFFFFFFFFL));
|
||||
putUInt32(buf, off + 4, (int) (v & 0xFFFFFFFFL));
|
||||
}
|
||||
|
||||
private static int readFully(SharpStream in, byte[] buf, int off, int len) throws IOException {
|
||||
int readTotal = 0;
|
||||
while (readTotal < len) {
|
||||
int r = in.read(buf, off + readTotal, len - readTotal);
|
||||
if (r <= 0) break;
|
||||
readTotal += r;
|
||||
}
|
||||
return readTotal;
|
||||
}
|
||||
|
||||
private static int indexOfBox(byte[] payload, int boxType) {
|
||||
int idx = 0;
|
||||
while (idx + 8 <= payload.length) {
|
||||
int size = readUInt32(payload, idx);
|
||||
int type = readUInt32(payload, idx + 4);
|
||||
if (type == boxType) return idx;
|
||||
if (size <= 0) break;
|
||||
idx += size;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
private static byte[] intToBytes(int v) {
|
||||
return ByteBuffer.allocate(4).putInt(v).array();
|
||||
}
|
||||
|
||||
private static byte[] longToBytes(long v) {
|
||||
return ByteBuffer.allocate(8).putLong(v).array();
|
||||
}
|
||||
|
||||
/**
|
||||
* Build udta bytes using {@link Mp4MetadataHelper}.
|
||||
*/
|
||||
private byte[] buildUdta() throws IOException {
|
||||
final GrowableByteArray aux = new GrowableByteArray(Math.max(64 * 1024, 256 * 1024));
|
||||
|
||||
final Mp4MetadataHelper helper = new Mp4MetadataHelper(
|
||||
aux::position,
|
||||
aux::put,
|
||||
offset -> {
|
||||
int size = aux.position() - offset;
|
||||
aux.putInt(offset, size);
|
||||
return size;
|
||||
},
|
||||
streamInfo,
|
||||
thumbnail
|
||||
);
|
||||
|
||||
helper.makeUdta();
|
||||
|
||||
return aux.toByteArray();
|
||||
}
|
||||
|
||||
/**
|
||||
* Small growable byte array helper with minimal random-access putInt support
|
||||
*/
|
||||
private static final class GrowableByteArray {
|
||||
private byte[] buf;
|
||||
private int pos = 0;
|
||||
|
||||
GrowableByteArray(int initial) {
|
||||
buf = new byte[initial];
|
||||
}
|
||||
|
||||
int position() { return pos; }
|
||||
|
||||
void put(byte[] data) {
|
||||
ensureCapacity(pos + data.length);
|
||||
System.arraycopy(data, 0, buf, pos, data.length);
|
||||
pos += data.length;
|
||||
}
|
||||
|
||||
void putInt(int offset, int value) {
|
||||
ensureCapacity(offset + 4);
|
||||
buf[offset] = (byte) ((value >>> 24) & 0xff);
|
||||
buf[offset + 1] = (byte) ((value >>> 16) & 0xff);
|
||||
buf[offset + 2] = (byte) ((value >>> 8) & 0xff);
|
||||
buf[offset + 3] = (byte) (value & 0xff);
|
||||
}
|
||||
|
||||
private void ensureCapacity(int min) {
|
||||
if (min <= buf.length) return;
|
||||
int newCap = buf.length * 2;
|
||||
while (newCap < min) newCap *= 2;
|
||||
buf = Arrays.copyOf(buf, newCap);
|
||||
}
|
||||
|
||||
byte[] toByteArray() {
|
||||
return Arrays.copyOf(buf, pos);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,224 @@
|
||||
package us.shandian.giga.postprocessing;
|
||||
|
||||
import android.graphics.Bitmap;
|
||||
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfo;
|
||||
import org.schabi.newpipe.util.StreamInfoMetadataHelper;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public final class Mp4MetadataHelper {
|
||||
|
||||
@Nullable
|
||||
final StreamInfo streamInfo;
|
||||
@Nullable final Bitmap thumbnail;
|
||||
@Nonnull final Supplier<Integer> auxOffset;
|
||||
@Nonnull final Consumer<byte[]> auxWriteBytes;
|
||||
@Nonnull final Function<Integer, Integer> lengthFor;
|
||||
public Mp4MetadataHelper(@Nonnull Supplier<Integer> auxOffset,
|
||||
@Nonnull Consumer<byte[]> auxWriteBytes,
|
||||
@Nonnull Function<Integer, Integer> lengthFor,
|
||||
@Nullable final StreamInfo streamInfo,
|
||||
@Nullable final Bitmap thumbnail) {
|
||||
this.auxOffset = auxOffset;
|
||||
this.auxWriteBytes = auxWriteBytes;
|
||||
this.lengthFor = lengthFor;
|
||||
this.streamInfo = streamInfo;
|
||||
this.thumbnail = thumbnail;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the 'udta' box with metadata fields.
|
||||
* {@code udta} is a user data box that can contain various types of metadata,
|
||||
* including title, artist, date, and cover art.
|
||||
* @see <a href="https://developer.apple.com/documentation/quicktime-file-format/
|
||||
* user_data_atoms">Apple Quick Time Format Specification for user data atoms</a>
|
||||
* @see <a href="https://wiki.multimedia.cx/index.php?title=FFmpeg_Metadata
|
||||
* #QuickTime/MOV/MP4/M4A/et_al.">Multimedia Wiki FFmpeg Metadata</a>
|
||||
* @see <a href="https://atomicparsley.sourceforge.net/mpeg-4files.html">atomicparsley docs</a>
|
||||
* for a short and understandable reference about metadata keys and values
|
||||
* @throws IOException
|
||||
*/
|
||||
public void makeUdta() throws IOException {
|
||||
if (streamInfo == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
// udta
|
||||
final int startUdta = auxOffset.get();
|
||||
auxWriteBytes.accept(ByteBuffer.allocate(8).putInt(0).putInt(0x75647461).array()); // "udta"
|
||||
|
||||
// meta (full box: type + version/flags)
|
||||
final int startMeta = auxOffset.get();
|
||||
auxWriteBytes.accept(ByteBuffer.allocate(8).putInt(0).putInt(0x6D657461).array()); // "meta"
|
||||
auxWriteBytes.accept(ByteBuffer.allocate(4).putInt(0).array()); // version & flags = 0
|
||||
|
||||
// hdlr inside meta
|
||||
auxWriteBytes.accept(makeMetaHdlr());
|
||||
|
||||
// ilst container
|
||||
final int startIlst = auxOffset.get();
|
||||
auxWriteBytes.accept(ByteBuffer.allocate(8).putInt(0).putInt(0x696C7374).array()); // "ilst"
|
||||
|
||||
// write metadata items
|
||||
|
||||
final var metaHelper = new StreamInfoMetadataHelper(streamInfo);
|
||||
final String title = metaHelper.getTitle();
|
||||
final String artist = metaHelper.getArtist();
|
||||
final String date = metaHelper.getReleaseDate().getLocalDateTime()
|
||||
.toLocalDate().toString();
|
||||
final String recordLabel = metaHelper.getRecordLabel();
|
||||
final String copyright = metaHelper.getCopyright();
|
||||
|
||||
if (title != null && !title.isEmpty()) {
|
||||
writeMetaItem("©nam", title);
|
||||
}
|
||||
if (artist != null && !artist.isEmpty()) {
|
||||
writeMetaItem("©ART", artist);
|
||||
}
|
||||
if (date != null && !date.isEmpty()) {
|
||||
// this means 'year' in mp4 metadata, who the hell thought that?
|
||||
writeMetaItem("©day", date);
|
||||
}
|
||||
if (recordLabel != null && !recordLabel.isEmpty()) {
|
||||
writeMetaItem("©lab", recordLabel);
|
||||
}
|
||||
if (copyright != null && !copyright.isEmpty()) {
|
||||
writeMetaItem("©cpy", copyright);
|
||||
}
|
||||
|
||||
if (thumbnail != null) {
|
||||
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
thumbnail.compress(Bitmap.CompressFormat.PNG, 100, baos);
|
||||
final byte[] imgBytes = baos.toByteArray();
|
||||
baos.close();
|
||||
// 0x0000000E = PNG type indicator for 'data' box (0x0D = JPEG)
|
||||
writeMetaCover(imgBytes, 0x0000000E);
|
||||
|
||||
}
|
||||
|
||||
// fix lengths
|
||||
lengthFor.apply(startIlst);
|
||||
lengthFor.apply(startMeta);
|
||||
lengthFor.apply(startUdta);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to write a metadata item inside the 'ilst' box.
|
||||
*
|
||||
* <pre>
|
||||
* [size][key] [data_box]
|
||||
* data_box = [size]["data"][type(4bytes)=1][locale(4bytes)=0][payload]
|
||||
* </pre>
|
||||
*
|
||||
* @param keyStr 4-char metadata key
|
||||
* @param value the metadata value
|
||||
* @throws IOException
|
||||
*/
|
||||
private void writeMetaItem(final String keyStr, final String value) throws IOException {
|
||||
final byte[] valBytes = value.getBytes(StandardCharsets.UTF_8);
|
||||
final byte[] keyBytes = keyStr.getBytes(StandardCharsets.ISO_8859_1);
|
||||
|
||||
final int dataBoxSize = 16 + valBytes.length; // 4(size)+4("data")+4(type/locale)+payload
|
||||
final int itemBoxSize = 8 + dataBoxSize; // 4(size)+4(key)+dataBox
|
||||
|
||||
final ByteBuffer buf = ByteBuffer.allocate(itemBoxSize);
|
||||
buf.putInt(itemBoxSize);
|
||||
// key (4 bytes)
|
||||
if (keyBytes.length == 4) {
|
||||
buf.put(keyBytes);
|
||||
} else {
|
||||
// fallback: pad or truncate
|
||||
final byte[] kb = new byte[4];
|
||||
System.arraycopy(keyBytes, 0, kb, 0, Math.min(keyBytes.length, 4));
|
||||
buf.put(kb);
|
||||
}
|
||||
|
||||
// data box
|
||||
buf.putInt(dataBoxSize);
|
||||
buf.putInt(0x64617461); // "data"
|
||||
buf.putInt(0x00000001); // well-known type indicator (UTF-8)
|
||||
buf.putInt(0x00000000); // locale
|
||||
buf.put(valBytes);
|
||||
|
||||
auxWriteBytes.accept(buf.array());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a minimal hdlr box for the meta container.
|
||||
* The boxsize is fixed (33 bytes) as no name is provided.
|
||||
* @return byte array with the hdlr box
|
||||
*/
|
||||
private byte[] makeMetaHdlr() {
|
||||
final ByteBuffer buf = ByteBuffer.allocate(33);
|
||||
buf.putInt(33);
|
||||
buf.putInt(0x68646C72); // "hdlr"
|
||||
buf.putInt(0x00000000); // pre-defined
|
||||
buf.putInt(0x6D646972); // "mdir" handler_type (metadata directory)
|
||||
buf.putInt(0x00000000); // subtype / reserved
|
||||
buf.put(new byte[12]); // reserved
|
||||
buf.put((byte) 0x00); // name (empty, null-terminated)
|
||||
return buf.array();
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to add cover image inside the 'udta' box.
|
||||
* <p>
|
||||
* This method writes the 'covr' metadata item which contains the cover image.
|
||||
* The cover image is displayed as thumbnail in many media players and file managers.
|
||||
* </p>
|
||||
* <pre>
|
||||
* [size][key] [data_box]
|
||||
* data_box = [size]["data"][type(4bytes)][locale(4bytes)=0][payload]
|
||||
* </pre>
|
||||
*
|
||||
* @param imageData image byte data
|
||||
* @param dataType type indicator: 0x0000000E = PNG, 0x0000000D = JPEG
|
||||
* @throws IOException
|
||||
*/
|
||||
private void writeMetaCover(final byte[] imageData, final int dataType) throws IOException {
|
||||
if (imageData == null || imageData.length == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
final byte[] keyBytes = "covr".getBytes(StandardCharsets.ISO_8859_1);
|
||||
|
||||
// data box: 4(size) + 4("data") + 4(type) + 4(locale) + payload
|
||||
final int dataBoxSize = 16 + imageData.length;
|
||||
final int itemBoxSize = 8 + dataBoxSize;
|
||||
|
||||
final ByteBuffer buf = ByteBuffer.allocate(itemBoxSize);
|
||||
buf.putInt(itemBoxSize);
|
||||
|
||||
// key (4 chars)
|
||||
if (keyBytes.length == 4) {
|
||||
buf.put(keyBytes);
|
||||
} else {
|
||||
final byte[] kb = new byte[4];
|
||||
System.arraycopy(keyBytes, 0, kb, 0, Math.min(keyBytes.length, 4));
|
||||
buf.put(kb);
|
||||
}
|
||||
|
||||
// data box
|
||||
buf.putInt(dataBoxSize);
|
||||
buf.putInt(0x64617461); // "data"
|
||||
buf.putInt(dataType); // type indicator: 0x0000000E = PNG, 0x0000000D = JPEG
|
||||
buf.putInt(0x00000000); // locale
|
||||
buf.put(imageData);
|
||||
|
||||
auxWriteBytes.accept(buf.array());
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@ -34,7 +34,8 @@ class OggFromWebmDemuxer extends Postprocessing {
|
||||
|
||||
@Override
|
||||
int process(SharpStream out, @NonNull SharpStream... sources) throws IOException {
|
||||
OggFromWebMWriter demuxer = new OggFromWebMWriter(sources[0], out, streamInfo);
|
||||
OggFromWebMWriter demuxer = new OggFromWebMWriter(
|
||||
sources[0], out, embedMetadata, streamInfo, thumbnail);
|
||||
demuxer.parseSource();
|
||||
demuxer.selectTrack(0);
|
||||
demuxer.build();
|
||||
|
||||
@ -1,8 +1,10 @@
|
||||
package us.shandian.giga.postprocessing;
|
||||
|
||||
import android.graphics.Bitmap;
|
||||
import android.util.Log;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfo;
|
||||
import org.schabi.newpipe.streams.io.SharpStream;
|
||||
@ -23,16 +25,20 @@ import static us.shandian.giga.get.DownloadMission.ERROR_POSTPROCESSING_HOLD;
|
||||
|
||||
public abstract class Postprocessing implements Serializable {
|
||||
|
||||
static transient final byte OK_RESULT = ERROR_NOTHING;
|
||||
static final byte OK_RESULT = ERROR_NOTHING;
|
||||
|
||||
public transient static final String ALGORITHM_TTML_CONVERTER = "ttml";
|
||||
public transient static final String ALGORITHM_WEBM_MUXER = "webm";
|
||||
public transient static final String ALGORITHM_MP4_FROM_DASH_MUXER = "mp4D-mp4";
|
||||
public transient static final String ALGORITHM_M4A_NO_DASH = "mp4D-m4a";
|
||||
public transient static final String ALGORITHM_OGG_FROM_WEBM_DEMUXER = "webm-ogg-d";
|
||||
public static final String ALGORITHM_TTML_CONVERTER = "ttml";
|
||||
public static final String ALGORITHM_WEBM_MUXER = "webm";
|
||||
public static final String ALGORITHM_MP3_METADATA = "mp3-metadata";
|
||||
public static final String ALGORITHM_MP4_METADATA = "mp4-metadata";
|
||||
public static final String ALGORITHM_MP4_FROM_DASH_MUXER = "mp4D-mp4";
|
||||
public static final String ALGORITHM_M4A_NO_DASH = "mp4D-m4a";
|
||||
public static final String ALGORITHM_OGG_FROM_WEBM_DEMUXER = "webm-ogg-d";
|
||||
|
||||
public static Postprocessing getAlgorithm(@NonNull String algorithmName, String[] args,
|
||||
StreamInfo streamInfo) {
|
||||
public static Postprocessing getAlgorithm(@NonNull String algorithmName,
|
||||
boolean embedMetadata,
|
||||
String[] args,
|
||||
@NonNull StreamInfo streamInfo) {
|
||||
Postprocessing instance;
|
||||
|
||||
switch (algorithmName) {
|
||||
@ -42,6 +48,12 @@ public abstract class Postprocessing implements Serializable {
|
||||
case ALGORITHM_WEBM_MUXER:
|
||||
instance = new WebMMuxer();
|
||||
break;
|
||||
case ALGORITHM_MP3_METADATA:
|
||||
instance = new Mp3Metadata();
|
||||
break;
|
||||
case ALGORITHM_MP4_METADATA:
|
||||
instance = new Mp4Metadata();
|
||||
break;
|
||||
case ALGORITHM_MP4_FROM_DASH_MUXER:
|
||||
instance = new Mp4FromDashMuxer();
|
||||
break;
|
||||
@ -51,14 +63,13 @@ public abstract class Postprocessing implements Serializable {
|
||||
case ALGORITHM_OGG_FROM_WEBM_DEMUXER:
|
||||
instance = new OggFromWebmDemuxer();
|
||||
break;
|
||||
/*case "example-algorithm":
|
||||
instance = new ExampleAlgorithm();*/
|
||||
default:
|
||||
throw new UnsupportedOperationException("Unimplemented post-processing algorithm: " + algorithmName);
|
||||
}
|
||||
|
||||
instance.args = args;
|
||||
instance.streamInfo = streamInfo;
|
||||
instance.embedMetadata = embedMetadata;
|
||||
return instance;
|
||||
}
|
||||
|
||||
@ -79,8 +90,25 @@ public abstract class Postprocessing implements Serializable {
|
||||
private final String name;
|
||||
|
||||
private String[] args;
|
||||
|
||||
/**
|
||||
* Indicates whether the metadata should be embedded in the file or not.
|
||||
*/
|
||||
boolean embedMetadata;
|
||||
|
||||
/**
|
||||
* StreamInfo object related to the current download
|
||||
*/
|
||||
@NonNull
|
||||
protected StreamInfo streamInfo;
|
||||
|
||||
/**
|
||||
* The thumbnail / cover art bitmap associated with the current download.
|
||||
* May be null.
|
||||
*/
|
||||
@Nullable
|
||||
protected Bitmap thumbnail;
|
||||
|
||||
private transient DownloadMission mission;
|
||||
|
||||
private transient File tempFile;
|
||||
@ -107,6 +135,10 @@ public abstract class Postprocessing implements Serializable {
|
||||
}
|
||||
}
|
||||
|
||||
public void setThumbnail(Bitmap thumbnail) {
|
||||
this.thumbnail = thumbnail;
|
||||
}
|
||||
|
||||
|
||||
public void run(DownloadMission target) throws IOException {
|
||||
this.mission = target;
|
||||
|
||||
@ -74,6 +74,7 @@ public class DownloadManagerService extends Service {
|
||||
private static final String EXTRA_KIND = "DownloadManagerService.extra.kind";
|
||||
private static final String EXTRA_THREADS = "DownloadManagerService.extra.threads";
|
||||
private static final String EXTRA_POSTPROCESSING_NAME = "DownloadManagerService.extra.postprocessingName";
|
||||
private static final String EXTRA_POSTPROCESSING_METADATA = "DownloadManagerService.extra.postprocessingMetadata";
|
||||
private static final String EXTRA_POSTPROCESSING_ARGS = "DownloadManagerService.extra.postprocessingArgs";
|
||||
private static final String EXTRA_NEAR_LENGTH = "DownloadManagerService.extra.nearLength";
|
||||
private static final String EXTRA_PATH = "DownloadManagerService.extra.storagePath";
|
||||
@ -348,20 +349,21 @@ public class DownloadManagerService extends Service {
|
||||
/**
|
||||
* Start a new download mission
|
||||
*
|
||||
* @param context the activity context
|
||||
* @param urls array of urls to download
|
||||
* @param storage where the file is saved
|
||||
* @param kind type of file (a: audio v: video s: subtitle ?: file-extension defined)
|
||||
* @param threads the number of threads maximal used to download chunks of the file.
|
||||
* @param psName the name of the required post-processing algorithm, or {@code null} to ignore.
|
||||
* @param streamInfo stream metadata that may be written into the downloaded file.
|
||||
* @param psArgs the arguments for the post-processing algorithm.
|
||||
* @param nearLength the approximated final length of the file
|
||||
* @param recoveryInfo array of MissionRecoveryInfo, in case is required recover the download
|
||||
* @param context the activity context
|
||||
* @param urls array of urls to download
|
||||
* @param storage where the file is saved
|
||||
* @param kind type of file (a: audio v: video s: subtitle ?: file-extension defined)
|
||||
* @param threads the number of threads maximal used to download chunks of the file.
|
||||
* @param streamInfo stream metadata that may be written into the downloaded file.
|
||||
* @param psName the name of the required post-processing algorithm, or {@code null} to ignore.
|
||||
* @param embedMetadata whether the metadata should be embedded into the downloaded file.
|
||||
* @param psArgs the arguments for the post-processing algorithm.
|
||||
* @param nearLength the approximated final length of the file
|
||||
* @param recoveryInfo array of MissionRecoveryInfo, in case is required recover the download
|
||||
*/
|
||||
public static void startMission(Context context, String[] urls, StoredFileHelper storage,
|
||||
char kind, int threads, StreamInfo streamInfo, String psName,
|
||||
String[] psArgs, long nearLength,
|
||||
boolean embedMetadata, String[] psArgs, long nearLength,
|
||||
ArrayList<MissionRecoveryInfo> recoveryInfo) {
|
||||
final Intent intent = new Intent(context, DownloadManagerService.class)
|
||||
.setAction(Intent.ACTION_RUN)
|
||||
@ -369,6 +371,7 @@ public class DownloadManagerService extends Service {
|
||||
.putExtra(EXTRA_KIND, kind)
|
||||
.putExtra(EXTRA_THREADS, threads)
|
||||
.putExtra(EXTRA_POSTPROCESSING_NAME, psName)
|
||||
.putExtra(EXTRA_POSTPROCESSING_METADATA, embedMetadata)
|
||||
.putExtra(EXTRA_POSTPROCESSING_ARGS, psArgs)
|
||||
.putExtra(EXTRA_NEAR_LENGTH, nearLength)
|
||||
.putExtra(EXTRA_RECOVERY_INFO, recoveryInfo)
|
||||
@ -387,10 +390,11 @@ public class DownloadManagerService extends Service {
|
||||
int threads = intent.getIntExtra(EXTRA_THREADS, 1);
|
||||
char kind = intent.getCharExtra(EXTRA_KIND, '?');
|
||||
String psName = intent.getStringExtra(EXTRA_POSTPROCESSING_NAME);
|
||||
boolean embedMetadata = intent.getBooleanExtra(EXTRA_POSTPROCESSING_METADATA, false);
|
||||
String[] psArgs = intent.getStringArrayExtra(EXTRA_POSTPROCESSING_ARGS);
|
||||
long nearLength = intent.getLongExtra(EXTRA_NEAR_LENGTH, 0);
|
||||
String tag = intent.getStringExtra(EXTRA_STORAGE_TAG);
|
||||
StreamInfo streamInfo = (StreamInfo)intent.getSerializableExtra(EXTRA_STREAM_INFO);
|
||||
StreamInfo streamInfo = (StreamInfo) intent.getSerializableExtra(EXTRA_STREAM_INFO);
|
||||
final var recovery = IntentCompat.getParcelableArrayListExtra(intent, EXTRA_RECOVERY_INFO,
|
||||
MissionRecoveryInfo.class);
|
||||
Objects.requireNonNull(recovery);
|
||||
@ -406,9 +410,10 @@ public class DownloadManagerService extends Service {
|
||||
if (psName == null)
|
||||
ps = null;
|
||||
else
|
||||
ps = Postprocessing.getAlgorithm(psName, psArgs, streamInfo);
|
||||
ps = Postprocessing.getAlgorithm(psName, embedMetadata, psArgs, streamInfo);
|
||||
|
||||
final DownloadMission mission = new DownloadMission(urls, storage, kind, ps);
|
||||
final DownloadMission mission = new DownloadMission(
|
||||
urls, storage, kind, ps, streamInfo, getApplicationContext());
|
||||
mission.threadCount = threads;
|
||||
mission.source = streamInfo.getUrl();
|
||||
mission.nearLength = nearLength;
|
||||
@ -417,7 +422,18 @@ public class DownloadManagerService extends Service {
|
||||
if (ps != null)
|
||||
ps.setTemporalDir(DownloadManager.pickAvailableTemporalDir(this));
|
||||
|
||||
handleConnectivityState(true);// first check the actual network status
|
||||
if (streamInfo != null) {
|
||||
new Thread(() -> {
|
||||
try {
|
||||
mission.fetchThumbnail(streamInfo.getThumbnails());
|
||||
} catch (Exception e) {
|
||||
Log.w(TAG, "failed to fetch thumbnail for mission: "
|
||||
+ mission.storage.getName(), e);
|
||||
}
|
||||
}, "ThumbnailFetcher").start();
|
||||
}
|
||||
|
||||
handleConnectivityState(true); // first check the actual network status
|
||||
|
||||
mManager.startMission(mission);
|
||||
}
|
||||
|
||||
@ -105,11 +105,21 @@
|
||||
android:text="@string/audio_track_present_in_video"
|
||||
android:textSize="12sp" />
|
||||
|
||||
<androidx.appcompat.widget.SwitchCompat
|
||||
android:id="@+id/metadata_switch"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_below="@+id/audio_track_present_in_video_text"
|
||||
android:layout_marginLeft="24dp"
|
||||
android:layout_marginRight="24dp"
|
||||
android:layout_marginBottom="12dp"
|
||||
android:text="@string/download_embed_metadata" />
|
||||
|
||||
<org.schabi.newpipe.views.NewPipeTextView
|
||||
android:id="@+id/threads_text_view"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_below="@+id/audio_track_present_in_video_text"
|
||||
android:layout_below="@+id/metadata_switch"
|
||||
android:layout_marginLeft="24dp"
|
||||
android:layout_marginRight="24dp"
|
||||
android:layout_marginBottom="6dp"
|
||||
|
||||
@ -354,6 +354,7 @@
|
||||
<string name="no_dir_yet">No download folder set yet, choose the default download folder now</string>
|
||||
<string name="msg_popup_permission">This permission is needed to\nopen in popup mode</string>
|
||||
<string name="one_item_deleted">1 item deleted.</string>
|
||||
<string name="download_embed_metadata">Embed metadata such as title, author, thumbnail</string>
|
||||
<!-- reCAPTCHA -->
|
||||
<string name="title_activity_recaptcha">reCAPTCHA challenge</string>
|
||||
<string name="subtitle_activity_recaptcha">Press \"Done\" when solved</string>
|
||||
|
||||
@ -65,7 +65,7 @@ teamnewpipe-nanojson = "e9d656ddb49a412a5a0a5d5ef20ca7ef09549996"
|
||||
# the corresponding commit hash, since JitPack sometimes deletes artifacts.
|
||||
# If there’s already a git hash, just add more of it to the end (or remove a letter)
|
||||
# to cause jitpack to regenerate the artifact.
|
||||
teamnewpipe-newpipe-extractor = "v0.25.2"
|
||||
teamnewpipe-newpipe-extractor = "1799852c25679026e3ff41a4b87993eaf4c748af"
|
||||
webkit = "1.14.0" # Newer versions require minSdk >= 23
|
||||
work = "2.10.5" # Newer versions require minSdk >= 23
|
||||
|
||||
@ -138,7 +138,7 @@ lisawray-groupie-core = { module = "com.github.lisawray.groupie:groupie", versio
|
||||
lisawray-groupie-viewbinding = { module = "com.github.lisawray.groupie:groupie-viewbinding", version.ref = "groupie" }
|
||||
livefront-bridge = { module = "com.github.livefront:bridge", version.ref = "bridge" }
|
||||
mockito-core = { module = "org.mockito:mockito-core", version.ref = "mockitoCore" }
|
||||
newpipe-extractor = { module = "com.github.TeamNewPipe:NewPipeExtractor", version.ref = "teamnewpipe-newpipe-extractor" }
|
||||
newpipe-extractor = { module = "com.github.tobigr:NewPipeExtractor", version.ref = "teamnewpipe-newpipe-extractor" }
|
||||
newpipe-filepicker = { module = "com.github.TeamNewPipe:NoNonsense-FilePicker", version.ref = "teamnewpipe-filepicker" }
|
||||
newpipe-nanojson = { module = "com.github.TeamNewPipe:nanojson", version.ref = "teamnewpipe-nanojson" }
|
||||
noties-markwon-core = { module = "io.noties.markwon:core", version.ref = "markwon" }
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user