Custom streaming video muxer.

fork-5.53.8
Alan Evans 2021-01-05 19:13:38 -04:00
parent 6080e1f338
commit b4c2e21415
37 changed files with 2018 additions and 167 deletions

View file

@ -337,6 +337,8 @@ dependencies {
implementation project(':libsignal-service')
implementation project(':paging')
implementation project(':core-util')
implementation project(':video')
implementation 'org.signal:zkgroup-android:0.7.0'
implementation 'org.whispersystems:signal-client-android:0.1.5'
implementation 'com.google.protobuf:protobuf-javalite:3.10.0'

View file

@ -749,7 +749,7 @@ public class AttachmentDatabase extends Database {
}
/**
* @param onlyModifyThisAttachment If false and more than one attachment shares this file, they will all up updated.
* @param onlyModifyThisAttachment If false and more than one attachment shares this file, they will all be updated.
* If true, then guarantees not to affect other attachments.
*/
public void updateAttachmentData(@NonNull DatabaseAttachment databaseAttachment,
@ -1030,7 +1030,7 @@ public class AttachmentDatabase extends Database {
}
}
private File newFile() throws IOException {
public File newFile() throws IOException {
File partsDirectory = context.getDir(DIRECTORY, Context.MODE_PRIVATE);
return File.createTempFile("part", ".mms", partsDirectory);
}

View file

@ -5,12 +5,18 @@ import android.media.MediaDataSource;
import androidx.annotation.NonNull;
import com.google.android.exoplayer2.util.MimeTypes;
import org.greenrobot.eventbus.EventBus;
import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.attachments.Attachment;
import org.thoughtcrime.securesms.attachments.AttachmentId;
import org.thoughtcrime.securesms.attachments.DatabaseAttachment;
import org.thoughtcrime.securesms.crypto.AttachmentSecret;
import org.thoughtcrime.securesms.crypto.AttachmentSecretProvider;
import org.thoughtcrime.securesms.crypto.ModernDecryptingPartInputStream;
import org.thoughtcrime.securesms.crypto.ModernEncryptingPartOutputStream;
import org.thoughtcrime.securesms.database.AttachmentDatabase;
import org.thoughtcrime.securesms.database.DatabaseFactory;
import org.thoughtcrime.securesms.events.PartProgressEvent;
@ -26,15 +32,22 @@ import org.thoughtcrime.securesms.service.NotificationController;
import org.thoughtcrime.securesms.transport.UndeliverableMessageException;
import org.thoughtcrime.securesms.util.BitmapDecodingException;
import org.thoughtcrime.securesms.util.BitmapUtil;
import org.thoughtcrime.securesms.util.FeatureFlags;
import org.thoughtcrime.securesms.util.MediaUtil;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor.MemoryFileException;
import org.thoughtcrime.securesms.video.InMemoryTranscoder;
import org.thoughtcrime.securesms.video.VideoSizeException;
import org.thoughtcrime.securesms.video.StreamingTranscoder;
import org.thoughtcrime.securesms.video.TranscoderCancelationSignal;
import org.thoughtcrime.securesms.video.TranscoderOptions;
import org.thoughtcrime.securesms.video.VideoSourceException;
import org.thoughtcrime.securesms.video.videoconverter.EncodingException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
public final class AttachmentCompressionJob extends BaseJob {
@ -177,7 +190,7 @@ public final class AttachmentCompressionJob extends BaseJob {
@NonNull DatabaseAttachment attachment,
@NonNull MediaConstraints constraints,
@NonNull EventBus eventBus,
@NonNull InMemoryTranscoder.CancelationSignal cancelationSignal)
@NonNull TranscoderCancelationSignal cancelationSignal)
throws UndeliverableMessageException
{
AttachmentDatabase.TransformProperties transformProperties = attachment.getTransformProperties();
@ -196,34 +209,73 @@ public final class AttachmentCompressionJob extends BaseJob {
notification.setIndeterminateProgress();
try (MediaDataSource dataSource = attachmentDatabase.mediaDataSourceFor(attachment.getAttachmentId())) {
if (dataSource == null) {
throw new UndeliverableMessageException("Cannot get media data source for attachment.");
}
allowSkipOnFailure = !transformProperties.isVideoEdited();
InMemoryTranscoder.Options options = null;
TranscoderOptions options = null;
if (transformProperties.isVideoTrim()) {
options = new InMemoryTranscoder.Options(transformProperties.getVideoTrimStartTimeUs(), transformProperties.getVideoTrimEndTimeUs());
options = new TranscoderOptions(transformProperties.getVideoTrimStartTimeUs(), transformProperties.getVideoTrimEndTimeUs());
}
try (InMemoryTranscoder transcoder = new InMemoryTranscoder(context, dataSource, options, constraints.getCompressedVideoMaxSize(context))) {
if (transcoder.isTranscodeRequired()) {
MediaStream mediaStream = transcoder.transcode(percent -> {
notification.setProgress(100, percent);
eventBus.postSticky(new PartProgressEvent(attachment,
PartProgressEvent.Type.COMPRESSION,
100,
percent));
}, cancelationSignal);
if (FeatureFlags.useStreamingVideoMuxer() || !MemoryFileDescriptor.supported()) {
StreamingTranscoder transcoder = new StreamingTranscoder(dataSource, options, constraints.getCompressedVideoMaxSize(context));
attachmentDatabase.updateAttachmentData(attachment, mediaStream, transformProperties.isVideoEdited());
attachmentDatabase.markAttachmentAsTransformed(attachment.getAttachmentId());
DatabaseAttachment updatedAttachment = attachmentDatabase.getAttachment(attachment.getAttachmentId());
if (updatedAttachment == null) {
throw new AssertionError();
if (transcoder.isTranscodeRequired()) {
Log.i(TAG, "Compressing with streaming muxer");
AttachmentSecret attachmentSecret = AttachmentSecretProvider.getInstance(context).getOrCreateAttachmentSecret();
File file = DatabaseFactory.getAttachmentDatabase(context)
.newFile();
file.deleteOnExit();
try {
try (OutputStream outputStream = ModernEncryptingPartOutputStream.createFor(attachmentSecret, file, true).second) {
transcoder.transcode(percent -> {
notification.setProgress(100, percent);
eventBus.postSticky(new PartProgressEvent(attachment,
PartProgressEvent.Type.COMPRESSION,
100,
percent));
}, outputStream, cancelationSignal);
}
MediaStream mediaStream = new MediaStream(ModernDecryptingPartInputStream.createFor(attachmentSecret, file, 0), MimeTypes.VIDEO_MP4, 0, 0);
attachmentDatabase.updateAttachmentData(attachment, mediaStream, transformProperties.isVideoEdited());
} finally {
if (!file.delete()) {
Log.w(TAG, "Failed to delete temp file");
}
}
attachmentDatabase.markAttachmentAsTransformed(attachment.getAttachmentId());
return Objects.requireNonNull(attachmentDatabase.getAttachment(attachment.getAttachmentId()));
} else {
Log.i(TAG, "Transcode was not required");
}
} else {
try (InMemoryTranscoder transcoder = new InMemoryTranscoder(context, dataSource, options, constraints.getCompressedVideoMaxSize(context))) {
if (transcoder.isTranscodeRequired()) {
Log.i(TAG, "Compressing with android in-memory muxer");
MediaStream mediaStream = transcoder.transcode(percent -> {
notification.setProgress(100, percent);
eventBus.postSticky(new PartProgressEvent(attachment,
PartProgressEvent.Type.COMPRESSION,
100,
percent));
}, cancelationSignal);
attachmentDatabase.updateAttachmentData(attachment, mediaStream, transformProperties.isVideoEdited());
attachmentDatabase.markAttachmentAsTransformed(attachment.getAttachmentId());
return Objects.requireNonNull(attachmentDatabase.getAttachment(attachment.getAttachmentId()));
} else {
Log.i(TAG, "Transcode was not required (in-memory transcoder)");
}
return updatedAttachment;
}
}
}
@ -237,7 +289,7 @@ public final class AttachmentCompressionJob extends BaseJob {
throw new UndeliverableMessageException("Failed to transcode and cannot skip due to editing", e);
}
}
} catch (IOException | MmsException | VideoSizeException e) {
} catch (IOException | MmsException e) {
throw new UndeliverableMessageException("Failed to transcode", e);
}
return attachment;

View file

@ -11,6 +11,7 @@ import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.attachments.Attachment;
import org.thoughtcrime.securesms.util.BitmapDecodingException;
import org.thoughtcrime.securesms.util.BitmapUtil;
import org.thoughtcrime.securesms.util.FeatureFlags;
import org.thoughtcrime.securesms.util.MediaUtil;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
@ -76,6 +77,6 @@ public abstract class MediaConstraints {
}
public static boolean isVideoTranscodeAvailable() {
return Build.VERSION.SDK_INT >= 26 && MemoryFileDescriptor.supported();
return Build.VERSION.SDK_INT >= 26 && (FeatureFlags.useStreamingVideoMuxer() || MemoryFileDescriptor.supported());
}
}

View file

@ -65,6 +65,7 @@ public final class FeatureFlags {
private static final String GV1_FORCED_MIGRATE = "android.groupsV1Migration.forced";
private static final String GV1_MIGRATION_JOB = "android.groupsV1Migration.job";
private static final String SEND_VIEWED_RECEIPTS = "android.sendViewedReceipts";
private static final String DISABLE_CUSTOM_VIDEO_MUXER = "android.disableCustomVideoMuxer";
/**
* We will only store remote values for flags in this set. If you want a flag to be controllable
@ -108,7 +109,8 @@ public final class FeatureFlags {
VERIFY_V2,
CLIENT_EXPIRATION,
GROUP_CALLING,
GV1_MIGRATION_JOB
GV1_MIGRATION_JOB,
DISABLE_CUSTOM_VIDEO_MUXER
);
/**
@ -253,6 +255,11 @@ public final class FeatureFlags {
return getBoolean(SEND_VIEWED_RECEIPTS, false);
}
/** Whether to use the custom streaming muxer or the built-in Android muxer. */
public static boolean useStreamingVideoMuxer() {
return !getBoolean(DISABLE_CUSTOM_VIDEO_MUXER, false);
}
/** Only for rendering debug info. */
public static synchronized @NonNull Map<String, Object> getMemoryValues() {
return new TreeMap<>(REMOTE_VALUES);

View file

@ -39,14 +39,14 @@ public final class InMemoryTranscoder implements Closeable {
private final long memoryFileEstimate;
private final boolean transcodeRequired;
private final long fileSizeEstimate;
private final @Nullable Options options;
private final @Nullable TranscoderOptions options;
private @Nullable MemoryFileDescriptor memoryFile;
/**
* @param upperSizeLimit An upper size to transcode to. The actual output size can be up to 10% smaller.
*/
public InMemoryTranscoder(@NonNull Context context, @NonNull MediaDataSource dataSource, @Nullable Options options, long upperSizeLimit) throws IOException, VideoSourceException {
public InMemoryTranscoder(@NonNull Context context, @NonNull MediaDataSource dataSource, @Nullable TranscoderOptions options, long upperSizeLimit) throws IOException, VideoSourceException {
this.context = context;
this.dataSource = dataSource;
this.options = options;
@ -75,7 +75,7 @@ public final class InMemoryTranscoder implements Closeable {
}
public @NonNull MediaStream transcode(@NonNull Progress progress,
@Nullable CancelationSignal cancelationSignal)
@Nullable TranscoderCancelationSignal cancelationSignal)
throws IOException, EncodingException, VideoSizeException
{
if (memoryFile != null) throw new AssertionError("Not expecting to reuse transcoder");
@ -202,18 +202,4 @@ public final class InMemoryTranscoder implements Closeable {
public interface Progress {
void onProgress(int percent);
}
public interface CancelationSignal {
boolean isCanceled();
}
public final static class Options {
final long startTimeUs;
final long endTimeUs;
public Options(long startTimeUs, long endTimeUs) {
this.startTimeUs = startTimeUs;
this.endTimeUs = endTimeUs;
}
}
}

View file

@ -0,0 +1,214 @@
package org.thoughtcrime.securesms.video;
import android.media.MediaDataSource;
import android.media.MediaMetadataRetriever;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.media.MediaInput;
import org.thoughtcrime.securesms.video.videoconverter.EncodingException;
import org.thoughtcrime.securesms.video.videoconverter.MediaConverter;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.text.NumberFormat;
import java.util.Locale;
@RequiresApi(26)
public final class StreamingTranscoder {
private static final String TAG = Log.tag(StreamingTranscoder.class);
private final MediaDataSource dataSource;
private final long upperSizeLimit;
private final long inSize;
private final long duration;
private final int inputBitRate;
private final VideoBitRateCalculator.Quality targetQuality;
private final long memoryFileEstimate;
private final boolean transcodeRequired;
private final long fileSizeEstimate;
private final @Nullable TranscoderOptions options;
/**
* @param upperSizeLimit An upper size to transcode to. The actual output size can be up to 10% smaller.
*/
public StreamingTranscoder(@NonNull MediaDataSource dataSource,
@Nullable TranscoderOptions options,
long upperSizeLimit)
throws IOException, VideoSourceException
{
this.dataSource = dataSource;
this.options = options;
final MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
try {
mediaMetadataRetriever.setDataSource(dataSource);
} catch (RuntimeException e) {
Log.w(TAG, "Unable to read datasource", e);
throw new VideoSourceException("Unable to read datasource", e);
}
this.inSize = dataSource.getSize();
this.duration = getDuration(mediaMetadataRetriever);
this.inputBitRate = VideoBitRateCalculator.bitRate(inSize, duration);
this.targetQuality = new VideoBitRateCalculator(upperSizeLimit).getTargetQuality(duration, inputBitRate);
this.upperSizeLimit = upperSizeLimit;
this.transcodeRequired = inputBitRate >= targetQuality.getTargetTotalBitRate() * 1.2 || inSize > upperSizeLimit || containsLocation(mediaMetadataRetriever) || options != null;
if (!transcodeRequired) {
Log.i(TAG, "Video is within 20% of target bitrate, below the size limit, contained no location metadata or custom options.");
}
this.fileSizeEstimate = targetQuality.getFileSizeEstimate();
this.memoryFileEstimate = (long) (fileSizeEstimate * 1.1);
}
public void transcode(@NonNull Progress progress,
@NonNull OutputStream stream,
@Nullable TranscoderCancelationSignal cancelationSignal)
throws IOException, EncodingException
{
float durationSec = duration / 1000f;
NumberFormat numberFormat = NumberFormat.getInstance(Locale.US);
Log.i(TAG, String.format(Locale.US,
"Transcoding:\n" +
"Target bitrate : %s + %s = %s\n" +
"Target format : %dp\n" +
"Video duration : %.1fs\n" +
"Size limit : %s kB\n" +
"Estimate : %s kB\n" +
"Input size : %s kB\n" +
"Input bitrate : %s bps",
numberFormat.format(targetQuality.getTargetVideoBitRate()),
numberFormat.format(targetQuality.getTargetAudioBitRate()),
numberFormat.format(targetQuality.getTargetTotalBitRate()),
targetQuality.getOutputResolution(),
durationSec,
numberFormat.format(upperSizeLimit / 1024),
numberFormat.format(fileSizeEstimate / 1024),
numberFormat.format(inSize / 1024),
numberFormat.format(inputBitRate)));
if (fileSizeEstimate > upperSizeLimit) {
throw new VideoSizeException("Size constraints could not be met!");
}
final long startTime = System.currentTimeMillis();
final MediaConverter converter = new MediaConverter();
final LimitedSizeOutputStream limitedSizeOutputStream = new LimitedSizeOutputStream(stream, upperSizeLimit);
converter.setInput(new MediaInput.MediaDataSourceMediaInput(dataSource));
converter.setOutput(limitedSizeOutputStream);
converter.setVideoResolution(targetQuality.getOutputResolution());
converter.setVideoBitrate(targetQuality.getTargetVideoBitRate());
converter.setAudioBitrate(targetQuality.getTargetAudioBitRate());
if (options != null) {
if (options.endTimeUs > 0) {
long timeFrom = options.startTimeUs / 1000;
long timeTo = options.endTimeUs / 1000;
converter.setTimeRange(timeFrom, timeTo);
Log.i(TAG, String.format(Locale.US, "Trimming:\nTotal duration: %d\nKeeping: %d..%d\nFinal duration:(%d)", duration, timeFrom, timeTo, timeTo - timeFrom));
}
}
converter.setListener(percent -> {
progress.onProgress(percent);
return cancelationSignal != null && cancelationSignal.isCanceled();
});
converter.convert();
long outSize = limitedSizeOutputStream.written;
float encodeDurationSec = (System.currentTimeMillis() - startTime) / 1000f;
Log.i(TAG, String.format(Locale.US,
"Transcoding complete:\n" +
"Transcode time : %.1fs (%.1fx)\n" +
"Output size : %s kB\n" +
" of Original : %.1f%%\n" +
" of Estimate : %.1f%%\n" +
" of Memory : %.1f%%\n" +
"Output bitrate : %s bps",
encodeDurationSec,
durationSec / encodeDurationSec,
numberFormat.format(outSize / 1024),
(outSize * 100d) / inSize,
(outSize * 100d) / fileSizeEstimate,
(outSize * 100d) / memoryFileEstimate,
numberFormat.format(VideoBitRateCalculator.bitRate(outSize, duration))));
if (outSize > upperSizeLimit) {
throw new VideoSizeException("Size constraints could not be met!");
}
stream.flush();
}
public boolean isTranscodeRequired() {
return transcodeRequired;
}
private static long getDuration(MediaMetadataRetriever mediaMetadataRetriever) throws VideoSourceException {
String durationString = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
if (durationString == null) {
throw new VideoSourceException("Cannot determine duration of video, null meta data");
}
try {
long duration = Long.parseLong(durationString);
if (duration <= 0) {
throw new VideoSourceException("Cannot determine duration of video, meta data: " + durationString);
}
return duration;
} catch (NumberFormatException e) {
throw new VideoSourceException("Cannot determine duration of video, meta data: " + durationString, e);
}
}
private static boolean containsLocation(MediaMetadataRetriever mediaMetadataRetriever) {
String locationString = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_LOCATION);
return locationString != null;
}
public interface Progress {
void onProgress(int percent);
}
private static class LimitedSizeOutputStream extends FilterOutputStream {
private final long sizeLimit;
private long written;
LimitedSizeOutputStream(@NonNull OutputStream inner, long sizeLimit) {
super(inner);
this.sizeLimit = sizeLimit;
}
@Override public void write(int b) throws IOException {
incWritten(1);
out.write(b);
}
@Override public void write(byte[] b, int off, int len) throws IOException {
incWritten(len);
out.write(b, off, len);
}
private void incWritten(int len) throws IOException {
long newWritten = written + len;
if (newWritten > sizeLimit) {
Log.w(TAG, String.format(Locale.US, "File size limit hit. Wrote %d, tried to write %d more. Limit is %d", written, len, sizeLimit));
throw new VideoSizeException("File size limit hit");
}
written = newWritten;
}
}
}
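
A minimal usage sketch (editor's illustration, not part of this commit) of the API added above: a caller constructs the transcoder, checks isTranscodeRequired(), and streams the result into any OutputStream. The StreamingTranscoderSketch class name, the plain FileOutputStream target, and the null trim options are assumptions; the constructor, the Progress callback, and the TranscoderCancelationSignal lambda follow the signatures shown in this file.

import android.media.MediaDataSource;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.thoughtcrime.securesms.video.StreamingTranscoder;
import org.thoughtcrime.securesms.video.VideoSourceException;
import org.thoughtcrime.securesms.video.videoconverter.EncodingException;

final class StreamingTranscoderSketch {

  /** Compresses source into outputPath; VideoSizeException (an IOException) if sizeLimit cannot be met. */
  @RequiresApi(26)
  static void compress(@NonNull MediaDataSource source, @NonNull String outputPath, long sizeLimit)
      throws IOException, VideoSourceException, EncodingException
  {
    StreamingTranscoder transcoder = new StreamingTranscoder(source, null, sizeLimit);

    if (!transcoder.isTranscodeRequired()) {
      return; // already under the bitrate/size targets, no location metadata, no trim requested
    }

    try (OutputStream out = new FileOutputStream(outputPath)) {
      transcoder.transcode(percent -> System.out.println("progress: " + percent + "%"), // Progress
                           out,
                           () -> false);                                                // never cancel
    }
  }
}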

View file

@ -0,0 +1,5 @@
package org.thoughtcrime.securesms.video;
public interface TranscoderCancelationSignal {
boolean isCanceled();
}

View file

@ -0,0 +1,11 @@
package org.thoughtcrime.securesms.video;
public final class TranscoderOptions {
final long startTimeUs;
final long endTimeUs;
public TranscoderOptions(long startTimeUs, long endTimeUs) {
this.startTimeUs = startTimeUs;
this.endTimeUs = endTimeUs;
}
}

View file

@ -1,6 +1,8 @@
package org.thoughtcrime.securesms.video;
public final class VideoSizeException extends Exception {
import java.io.IOException;
public final class VideoSizeException extends IOException {
VideoSizeException(String message) {
super(message);

View file

@ -30,11 +30,13 @@ import androidx.annotation.WorkerThread;
import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.media.MediaInput;
import org.thoughtcrime.securesms.video.videoconverter.muxer.StreamingMuxer;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@ -85,6 +87,10 @@ public final class MediaConverter {
mOutput = new FileDescriptorOutput(fileDescriptor);
}
public void setOutput(final @NonNull OutputStream stream) {
mOutput = new StreamOutput(stream);
}
@SuppressWarnings("unused")
public void setTimeRange(long timeFrom, long timeTo) {
mTimeFrom = timeFrom;
@ -332,4 +338,18 @@ public final class MediaConverter {
return new AndroidMuxer(fileDescriptor);
}
}
private static class StreamOutput implements Output {
final OutputStream outputStream;
StreamOutput(final @NonNull OutputStream outputStream) {
this.outputStream = outputStream;
}
@Override
public @NonNull Muxer createMuxer() {
return new StreamingMuxer(outputStream);
}
}
}
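
For the new OutputStream output path specifically, a sketch (editor's illustration, not part of this commit) of how MediaConverter might be wired end to end; the resolution and bitrate values and the StreamOutputSketch class name are assumptions, while every converter call appears in StreamingTranscoder.transcode() earlier in this commit.

import android.media.MediaDataSource;
import androidx.annotation.NonNull;
import java.io.IOException;
import java.io.OutputStream;
import org.thoughtcrime.securesms.media.MediaInput;
import org.thoughtcrime.securesms.video.videoconverter.EncodingException;
import org.thoughtcrime.securesms.video.videoconverter.MediaConverter;

final class StreamOutputSketch {

  /** Re-encodes source and muxes the MP4 straight into sink via the new StreamingMuxer path. */
  static void convert(@NonNull MediaDataSource source, @NonNull OutputStream sink)
      throws IOException, EncodingException
  {
    MediaConverter converter = new MediaConverter();
    converter.setInput(new MediaInput.MediaDataSourceMediaInput(source));
    converter.setOutput(sink);               // the OutputStream overload added in this commit
    converter.setVideoResolution(720);       // illustrative target values
    converter.setVideoBitrate(2_000_000);
    converter.setAudioBitrate(192_000);
    converter.setListener(percent -> false); // return true to cancel the conversion
    converter.convert();
  }
}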

View file

@ -1,10 +0,0 @@
package org.thoughtcrime.securesms.video.videoconverter;
final class Preconditions {
static void checkState(final Object errorMessage, final boolean expression) {
if (!expression) {
throw new IllegalStateException(String.valueOf(errorMessage));
}
}
}

View file

@ -423,6 +423,15 @@ dependencyVerification {
['org.jsoup:jsoup:1.8.3',
'abeaf34795a4de70f72aed6de5966d2955ec7eb348eeb813324f23c999575473'],
['org.mp4parser:isoparser:1.9.39',
'a3a7172648f1ac4b2a369ecca2861317e472179c842a5217b08643ba0a1dfa12'],
['org.mp4parser:muxer:1.9.39',
'4befe68d411cd889628b53bab211d395899a9ce893ae6766ec2f4fefec5b7835'],
['org.mp4parser:streaming:1.9.39',
'da5151cfc3bf491d550fb9127bba22736f4b7416058d58a1a5fcfdfa3673876d'],
['org.signal:aesgcmprovider:0.0.3',
'6eb4422e8a618b3b76cb2096a3619d251f9e27989dc68307a1e5414c3710f2d1'],
@ -441,6 +450,9 @@ dependencyVerification {
['org.signal:zkgroup-java:0.7.0',
'd0099eedd60d6f7d4df5b288175e5d585228ed8897789926bdab69bf8c05659f'],
['org.slf4j:slf4j-api:1.7.24',
'baf3c7fe15fefeaf9e5b000d94547379dc48370f22a8797e239c127e7d7756ec'],
['org.threeten:threetenbp:1.3.6',
'f4c23ffaaed717c3b99c003e0ee02d6d66377fd47d866fec7d971bd8644fc1a7'],

View file

@ -38,10 +38,11 @@ protobuf {
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.google.android.material:material:1.2.1'
lintChecks project(':lintchecks')
api 'androidx.annotation:annotation:1.1.0'
implementation 'com.google.protobuf:protobuf-javalite:3.10.0'
testImplementation 'junit:junit:4.13.1'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
}
}

View file

@ -1,17 +0,0 @@
package org.signal.core.util;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() {
assertEquals(4, 2 + 2);
}
}

View file

@ -3,97 +3,10 @@
dependencyVerification {
verify = [
['androidx.activity:activity:1.0.0',
'd1bc9842455c2e534415d88c44df4d52413b478db9093a1ba36324f705f44c3d'],
['androidx.annotation:annotation-experimental:1.0.0',
'b219d2b568e7e4ba534e09f8c2fd242343df6ccbdfbbe938846f5d740e6b0b11'],
['androidx.annotation:annotation:1.1.0',
'd38d63edb30f1467818d50aaf05f8a692dea8b31392a049bfa991b159ad5b692'],
['androidx.appcompat:appcompat-resources:1.2.0',
'c470297c03ff3de1c3d15dacf0be0cae63abc10b52f021dd07ae28daa3100fe5'],
['androidx.appcompat:appcompat:1.2.0',
'3d2131a55a61a777322e2126e0018011efa6339e53b44153eb651b16020cca70'],
['androidx.arch.core:core-common:2.1.0',
'fe1237bf029d063e7f29fe39aeaf73ef74c8b0a3658486fc29d3c54326653889'],
['androidx.arch.core:core-runtime:2.0.0',
'87e65fc767c712b437649c7cee2431ebb4bed6daef82e501d4125b3ed3f65f8e'],
['androidx.cardview:cardview:1.0.0',
'1193c04c22a3d6b5946dae9f4e8c59d6adde6a71b6bd5d87fb99d82dda1afec7'],
['androidx.collection:collection:1.1.0',
'632a0e5407461de774409352940e292a291037724207a787820c77daf7d33b72'],
['androidx.coordinatorlayout:coordinatorlayout:1.1.0',
'44a9e30abf56af1025c52a0af506fee9c4131aa55efda52f9fd9451211c5e8cb'],
['androidx.core:core:1.3.0',
'1c6b6626f15185d8f4bc7caac759412a1ab6e851ecf7526387d9b9fadcabdb63'],
['androidx.cursoradapter:cursoradapter:1.0.0',
'a81c8fe78815fa47df5b749deb52727ad11f9397da58b16017f4eb2c11e28564'],
['androidx.customview:customview:1.0.0',
'20e5b8f6526a34595a604f56718da81167c0b40a7a94a57daa355663f2594df2'],
['androidx.drawerlayout:drawerlayout:1.0.0',
'9402442cdc5a43cf62fb14f8cf98c63342d4d9d9b805c8033c6cf7e802749ac1'],
['androidx.fragment:fragment:1.1.0',
'a14c8b8f2153f128e800fbd266a6beab1c283982a29ec570d2cc05d307d81496'],
['androidx.interpolator:interpolator:1.0.0',
'33193135a64fe21fa2c35eec6688f1a76e512606c0fc83dc1b689e37add7732a'],
['androidx.lifecycle:lifecycle-common:2.1.0',
'76db6be533bd730fb361c2feb12a2c26d9952824746847da82601ef81f082643'],
['androidx.lifecycle:lifecycle-livedata-core:2.0.0',
'fde334ec7e22744c0f5bfe7caf1a84c9d717327044400577bdf9bd921ec4f7bc'],
['androidx.lifecycle:lifecycle-livedata:2.0.0',
'c82609ced8c498f0a701a30fb6771bb7480860daee84d82e0a81ee86edf7ba39'],
['androidx.lifecycle:lifecycle-runtime:2.1.0',
'e5173897b965e870651e83d9d5af1742d3f532d58863223a390ce3a194c8312b'],
['androidx.lifecycle:lifecycle-viewmodel:2.1.0',
'ba55fb7ac1b2828d5327cda8acf7085d990b2b4c43ef336caa67686249b8523d'],
['androidx.loader:loader:1.0.0',
'11f735cb3b55c458d470bed9e25254375b518b4b1bad6926783a7026db0f5025'],
['androidx.recyclerview:recyclerview:1.1.0',
'f0d2b5a67d0a91ee1b1c73ef2b636a81f3563925ddd15a1d4e1c41ec28de7a4f'],
['androidx.savedstate:savedstate:1.0.0',
'2510a5619c37579c9ce1a04574faaf323cd0ffe2fc4e20fa8f8f01e5bb402e83'],
['androidx.transition:transition:1.2.0',
'a1e059b3bc0b43a58dec0efecdcaa89c82d2bca552ea5bacf6656c46e853157e'],
['androidx.vectordrawable:vectordrawable-animated:1.1.0',
'76da2c502371d9c38054df5e2b248d00da87809ed058f3363eae87ce5e2403f8'],
['androidx.vectordrawable:vectordrawable:1.1.0',
'46fd633ac01b49b7fcabc263bf098c5a8b9e9a69774d234edcca04fb02df8e26'],
['androidx.versionedparcelable:versionedparcelable:1.1.0',
'9a1d77140ac222b7866b5054ee7d159bc1800987ed2d46dd6afdd145abb710c1'],
['androidx.viewpager2:viewpager2:1.0.0',
'e95c0031d4cc247cd48196c6287e58d2cee54d9c79b85afea7c90920330275af'],
['androidx.viewpager:viewpager:1.0.0',
'147af4e14a1984010d8f155e5e19d781f03c1d70dfed02a8e0d18428b8fc8682'],
['com.google.android.material:material:1.2.1',
'd3d0cc776f2341da8e572586c7d390a5b356ce39a0deb2768071dc40b364ac80'],
['com.google.protobuf:protobuf-javalite:3.10.0',
'215a94dbe100130295906b531bb72a26965c7ac8fcd9a75bf8054a8ac2abf4b4'],
]
}

View file

@ -95,5 +95,8 @@ dependencyVerification {
['com.google.android.material:material:1.2.1',
'd3d0cc776f2341da8e572586c7d390a5b356ce39a0deb2768071dc40b364ac80'],
['com.google.protobuf:protobuf-javalite:3.10.0',
'215a94dbe100130295906b531bb72a26965c7ac8fcd9a75bf8054a8ac2abf4b4'],
]
}

View file

@ -4,6 +4,7 @@ include ':lintchecks'
include ':paging'
include ':paging-app'
include ':core-util'
include ':video'
project(':app').name = 'Signal-Android'
project(':paging').projectDir = file('paging/lib')

video/build.gradle 100644 (36 additions)
View file

@ -0,0 +1,36 @@
apply plugin: 'com.android.library'
apply plugin: 'witness'
apply from: 'witness-verifications.gradle'
android {
buildToolsVersion BUILD_TOOL_VERSION
compileSdkVersion COMPILE_SDK
defaultConfig {
minSdkVersion MINIMUM_SDK
targetSdkVersion TARGET_SDK
}
compileOptions {
sourceCompatibility JAVA_VERSION
targetCompatibility JAVA_VERSION
}
}
dependencyVerification {
configuration = '(debug|release)RuntimeClasspath'
}
dependencies {
lintChecks project(':lintchecks')
api 'androidx.annotation:annotation:1.1.0'
implementation project(':core-util')
implementation 'org.mp4parser:isoparser:1.9.39'
implementation 'org.mp4parser:streaming:1.9.39'
implementation('org.mp4parser:muxer:1.9.39') {
exclude group: 'junit', module: 'junit'
}
}

View file

@ -0,0 +1,2 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest package="org.signal.video" />

View file

@ -12,7 +12,7 @@ import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
public final class AndroidMuxer implements Muxer {
final class AndroidMuxer implements Muxer {
private final MediaMuxer muxer;
@ -20,13 +20,11 @@ public final class AndroidMuxer implements Muxer {
muxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
@RequiresApi(26)
AndroidMuxer(final @NonNull FileDescriptor fileDescriptor) throws IOException {
muxer = new MediaMuxer(fileDescriptor, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
@Override
public void start() {
muxer.start();

View file

@ -0,0 +1,10 @@
package org.thoughtcrime.securesms.video.videoconverter;
public final class Preconditions {
public static void checkState(final String errorMessage, final boolean expression) {
if (!expression) {
throw new IllegalStateException(errorMessage);
}
}
}

View file

@ -0,0 +1,116 @@
package org.thoughtcrime.securesms.video.videoconverter.muxer;
import android.util.SparseIntArray;
import org.mp4parser.boxes.iso14496.part1.objectdescriptors.AudioSpecificConfig;
import org.mp4parser.boxes.iso14496.part1.objectdescriptors.DecoderConfigDescriptor;
import org.mp4parser.boxes.iso14496.part1.objectdescriptors.ESDescriptor;
import org.mp4parser.boxes.iso14496.part1.objectdescriptors.SLConfigDescriptor;
import org.mp4parser.boxes.iso14496.part12.SampleDescriptionBox;
import org.mp4parser.boxes.iso14496.part14.ESDescriptorBox;
import org.mp4parser.boxes.sampleentry.AudioSampleEntry;
import org.mp4parser.streaming.extensions.DefaultSampleFlagsTrackExtension;
import org.mp4parser.streaming.input.AbstractStreamingTrack;
import org.mp4parser.streaming.input.StreamingSampleImpl;
import java.io.IOException;
import java.nio.ByteBuffer;
abstract class AacTrack extends AbstractStreamingTrack {
private static final SparseIntArray SAMPLING_FREQUENCY_INDEX_MAP = new SparseIntArray();
static {
SAMPLING_FREQUENCY_INDEX_MAP.put(96000, 0);
SAMPLING_FREQUENCY_INDEX_MAP.put(88200, 1);
SAMPLING_FREQUENCY_INDEX_MAP.put(64000, 2);
SAMPLING_FREQUENCY_INDEX_MAP.put(48000, 3);
SAMPLING_FREQUENCY_INDEX_MAP.put(44100, 4);
SAMPLING_FREQUENCY_INDEX_MAP.put(32000, 5);
SAMPLING_FREQUENCY_INDEX_MAP.put(24000, 6);
SAMPLING_FREQUENCY_INDEX_MAP.put(22050, 7);
SAMPLING_FREQUENCY_INDEX_MAP.put(16000, 8);
SAMPLING_FREQUENCY_INDEX_MAP.put(12000, 9);
SAMPLING_FREQUENCY_INDEX_MAP.put(11025, 10);
SAMPLING_FREQUENCY_INDEX_MAP.put(8000, 11);
}
private final SampleDescriptionBox stsd;
private int sampleRate;
AacTrack(long avgBitrate, long maxBitrate, int sampleRate, int channelCount, int aacProfile) {
this.sampleRate = sampleRate;
final DefaultSampleFlagsTrackExtension defaultSampleFlagsTrackExtension = new DefaultSampleFlagsTrackExtension();
defaultSampleFlagsTrackExtension.setIsLeading(2);
defaultSampleFlagsTrackExtension.setSampleDependsOn(2);
defaultSampleFlagsTrackExtension.setSampleIsDependedOn(2);
defaultSampleFlagsTrackExtension.setSampleHasRedundancy(2);
defaultSampleFlagsTrackExtension.setSampleIsNonSyncSample(false);
this.addTrackExtension(defaultSampleFlagsTrackExtension);
stsd = new SampleDescriptionBox();
final AudioSampleEntry audioSampleEntry = new AudioSampleEntry("mp4a");
if (channelCount == 7) {
audioSampleEntry.setChannelCount(8);
} else {
audioSampleEntry.setChannelCount(channelCount);
}
audioSampleEntry.setSampleRate(sampleRate);
audioSampleEntry.setDataReferenceIndex(1);
audioSampleEntry.setSampleSize(16);
final ESDescriptorBox esds = new ESDescriptorBox();
ESDescriptor descriptor = new ESDescriptor();
descriptor.setEsId(0);
final SLConfigDescriptor slConfigDescriptor = new SLConfigDescriptor();
slConfigDescriptor.setPredefined(2);
descriptor.setSlConfigDescriptor(slConfigDescriptor);
final DecoderConfigDescriptor decoderConfigDescriptor = new DecoderConfigDescriptor();
decoderConfigDescriptor.setObjectTypeIndication(0x40 /*Audio ISO/IEC 14496-3*/);
decoderConfigDescriptor.setStreamType(5 /*audio stream*/);
decoderConfigDescriptor.setBufferSizeDB(1536);
decoderConfigDescriptor.setMaxBitRate(maxBitrate);
decoderConfigDescriptor.setAvgBitRate(avgBitrate);
final AudioSpecificConfig audioSpecificConfig = new AudioSpecificConfig();
audioSpecificConfig.setOriginalAudioObjectType(aacProfile);
audioSpecificConfig.setSamplingFrequencyIndex(SAMPLING_FREQUENCY_INDEX_MAP.get(sampleRate));
audioSpecificConfig.setChannelConfiguration(channelCount);
decoderConfigDescriptor.setAudioSpecificInfo(audioSpecificConfig);
descriptor.setDecoderConfigDescriptor(decoderConfigDescriptor);
esds.setEsDescriptor(descriptor);
audioSampleEntry.addBox(esds);
stsd.addBox(audioSampleEntry);
}
public long getTimescale() {
return sampleRate;
}
public String getHandler() {
return "soun";
}
public String getLanguage() {
return "\u0060\u0060\u0060"; // 0 in Iso639
}
public synchronized SampleDescriptionBox getSampleDescriptionBox() {
return stsd;
}
public void close() {
}
void processSample(ByteBuffer frame) throws IOException {
sampleSink.acceptSample(new StreamingSampleImpl(frame, 1024), this);
}
}
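
One timing detail worth noting from the track above: getTimescale() returns the audio sample rate and processSample() stamps every frame with a duration of 1024 ticks, so AAC frame timing is exact in the muxed output. A quick arithmetic sketch (editor's illustration; the 44.1 kHz rate is an assumed example):

final class AacTimingSketch {
  public static void main(String[] args) {
    int sampleRate      = 44_100; // track timescale, as returned by AacTrack.getTimescale()
    int samplesPerFrame = 1_024;  // duration passed to StreamingSampleImpl in processSample()

    double frameMillis     = 1000.0 * samplesPerFrame / sampleRate;
    double framesPerSecond = (double) sampleRate / samplesPerFrame;

    // Prints roughly: one AAC frame = 23.22 ms (~43.07 frames/s)
    System.out.printf("one AAC frame = %.2f ms (~%.2f frames/s)%n", frameMillis, framesPerSecond);
  }
}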

View file

@ -0,0 +1,478 @@
package org.thoughtcrime.securesms.video.videoconverter.muxer;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.mp4parser.boxes.iso14496.part12.SampleDescriptionBox;
import org.mp4parser.boxes.iso14496.part15.AvcConfigurationBox;
import org.mp4parser.boxes.sampleentry.VisualSampleEntry;
import org.mp4parser.streaming.SampleExtension;
import org.mp4parser.streaming.StreamingSample;
import org.mp4parser.streaming.extensions.CompositionTimeSampleExtension;
import org.mp4parser.streaming.extensions.CompositionTimeTrackExtension;
import org.mp4parser.streaming.extensions.DimensionTrackExtension;
import org.mp4parser.streaming.extensions.SampleFlagsSampleExtension;
import org.mp4parser.streaming.input.AbstractStreamingTrack;
import org.mp4parser.streaming.input.StreamingSampleImpl;
import org.mp4parser.streaming.input.h264.H264NalUnitHeader;
import org.mp4parser.streaming.input.h264.H264NalUnitTypes;
import org.mp4parser.streaming.input.h264.spspps.PictureParameterSet;
import org.mp4parser.streaming.input.h264.spspps.SeqParameterSet;
import org.mp4parser.streaming.input.h264.spspps.SliceHeader;
import org.signal.core.util.logging.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
abstract class AvcTrack extends AbstractStreamingTrack {
private static final String TAG = "AvcTrack";
private int maxDecFrameBuffering = 16;
private final List<StreamingSample> decFrameBuffer = new ArrayList<>();
private final List<StreamingSample> decFrameBuffer2 = new ArrayList<>();
private final LinkedHashMap<Integer, ByteBuffer> spsIdToSpsBytes = new LinkedHashMap<>();
private final LinkedHashMap<Integer, SeqParameterSet> spsIdToSps = new LinkedHashMap<>();
private final LinkedHashMap<Integer, ByteBuffer> ppsIdToPpsBytes = new LinkedHashMap<>();
private final LinkedHashMap<Integer, PictureParameterSet> ppsIdToPps = new LinkedHashMap<>();
private int timescale = 90000;
private int frametick = 3000;
private final SampleDescriptionBox stsd;
private final List<ByteBuffer> bufferedNals = new ArrayList<>();
private FirstVclNalDetector fvnd;
private H264NalUnitHeader sliceNalUnitHeader;
private long currentPresentationTimeUs;
AvcTrack(final @NonNull ByteBuffer spsBuffer, final @NonNull ByteBuffer ppsBuffer) {
handlePPS(ppsBuffer);
final SeqParameterSet sps = handleSPS(spsBuffer);
int width = (sps.pic_width_in_mbs_minus1 + 1) * 16;
int mult = 2;
if (sps.frame_mbs_only_flag) {
mult = 1;
}
int height = 16 * (sps.pic_height_in_map_units_minus1 + 1) * mult;
if (sps.frame_cropping_flag) {
int chromaArrayType = 0;
if (!sps.residual_color_transform_flag) {
chromaArrayType = sps.chroma_format_idc.getId();
}
int cropUnitX = 1;
int cropUnitY = mult;
if (chromaArrayType != 0) {
cropUnitX = sps.chroma_format_idc.getSubWidth();
cropUnitY = sps.chroma_format_idc.getSubHeight() * mult;
}
width -= cropUnitX * (sps.frame_crop_left_offset + sps.frame_crop_right_offset);
height -= cropUnitY * (sps.frame_crop_top_offset + sps.frame_crop_bottom_offset);
}
final VisualSampleEntry visualSampleEntry = new VisualSampleEntry("avc1");
visualSampleEntry.setDataReferenceIndex(1);
visualSampleEntry.setDepth(24);
visualSampleEntry.setFrameCount(1);
visualSampleEntry.setHorizresolution(72);
visualSampleEntry.setVertresolution(72);
final DimensionTrackExtension dte = this.getTrackExtension(DimensionTrackExtension.class);
if (dte == null) {
this.addTrackExtension(new DimensionTrackExtension(width, height));
}
visualSampleEntry.setWidth(width);
visualSampleEntry.setHeight(height);
visualSampleEntry.setCompressorname("AVC Coding");
final AvcConfigurationBox avcConfigurationBox = new AvcConfigurationBox();
avcConfigurationBox.setSequenceParameterSets(Collections.singletonList(spsBuffer));
avcConfigurationBox.setPictureParameterSets(Collections.singletonList(ppsBuffer));
avcConfigurationBox.setAvcLevelIndication(sps.level_idc);
avcConfigurationBox.setAvcProfileIndication(sps.profile_idc);
avcConfigurationBox.setBitDepthLumaMinus8(sps.bit_depth_luma_minus8);
avcConfigurationBox.setBitDepthChromaMinus8(sps.bit_depth_chroma_minus8);
avcConfigurationBox.setChromaFormat(sps.chroma_format_idc.getId());
avcConfigurationBox.setConfigurationVersion(1);
avcConfigurationBox.setLengthSizeMinusOne(3);
avcConfigurationBox.setProfileCompatibility(
(sps.constraint_set_0_flag ? 128 : 0) +
(sps.constraint_set_1_flag ? 64 : 0) +
(sps.constraint_set_2_flag ? 32 : 0) +
(sps.constraint_set_3_flag ? 16 : 0) +
(sps.constraint_set_4_flag ? 8 : 0) +
(int) (sps.reserved_zero_2bits & 0x3)
);
visualSampleEntry.addBox(avcConfigurationBox);
stsd = new SampleDescriptionBox();
stsd.addBox(visualSampleEntry);
int _timescale;
int _frametick;
if (sps.vuiParams != null) {
_timescale = sps.vuiParams.time_scale >> 1; // Not sure why, but I found this in several places, and it works...
_frametick = sps.vuiParams.num_units_in_tick;
if (_timescale == 0 || _frametick == 0) {
Log.w(TAG, "vuiParams contain invalid values: time_scale: " + _timescale + " and frame_tick: " + _frametick + ". Setting frame rate to 30fps");
_timescale = 0;
_frametick = 0;
}
if (_frametick > 0) {
if (_timescale / _frametick > 100) {
Log.w(TAG, "Framerate is " + (_timescale / _frametick) + ". That is suspicious.");
}
} else {
Log.w(TAG, "Frametick is " + _frametick + ". That is suspicious.");
}
if (sps.vuiParams.bitstreamRestriction != null) {
maxDecFrameBuffering = sps.vuiParams.bitstreamRestriction.max_dec_frame_buffering;
}
} else {
Log.w(TAG, "Can't determine frame rate as SPS does not contain vuiParama");
_timescale = 0;
_frametick = 0;
}
if (_timescale != 0 && _frametick != 0) {
timescale = _timescale;
frametick = _frametick;
}
if (sps.pic_order_cnt_type == 0) {
addTrackExtension(new CompositionTimeTrackExtension());
} else if (sps.pic_order_cnt_type == 1) {
throw new MuxingException("Have not yet imlemented pic_order_cnt_type 1");
}
}
public long getTimescale() {
return timescale;
}
public String getHandler() {
return "vide";
}
public String getLanguage() {
return "\u0060\u0060\u0060"; // 0 in Iso639
}
public SampleDescriptionBox getSampleDescriptionBox() {
return stsd;
}
public void close() {
}
private static H264NalUnitHeader getNalUnitHeader(@NonNull final ByteBuffer nal) {
final H264NalUnitHeader nalUnitHeader = new H264NalUnitHeader();
final int type = nal.get(0);
nalUnitHeader.nal_ref_idc = (type >> 5) & 3;
nalUnitHeader.nal_unit_type = type & 0x1f;
return nalUnitHeader;
}
void consumeNal(@NonNull final ByteBuffer nal, final long presentationTimeUs) throws IOException {
final H264NalUnitHeader nalUnitHeader = getNalUnitHeader(nal);
switch (nalUnitHeader.nal_unit_type) {
case H264NalUnitTypes.CODED_SLICE_NON_IDR:
case H264NalUnitTypes.CODED_SLICE_DATA_PART_A:
case H264NalUnitTypes.CODED_SLICE_DATA_PART_B:
case H264NalUnitTypes.CODED_SLICE_DATA_PART_C:
case H264NalUnitTypes.CODED_SLICE_IDR:
final FirstVclNalDetector current = new FirstVclNalDetector(nal, nalUnitHeader.nal_ref_idc, nalUnitHeader.nal_unit_type);
if (fvnd != null && fvnd.isFirstInNew(current)) {
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, presentationTimeUs - currentPresentationTimeUs), false, false);
bufferedNals.clear();
}
currentPresentationTimeUs = Math.max(currentPresentationTimeUs, presentationTimeUs);
sliceNalUnitHeader = nalUnitHeader;
fvnd = current;
bufferedNals.add(nal);
break;
case H264NalUnitTypes.SEI:
case H264NalUnitTypes.AU_UNIT_DELIMITER:
if (fvnd != null) {
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, presentationTimeUs - currentPresentationTimeUs), false, false);
bufferedNals.clear();
fvnd = null;
}
bufferedNals.add(nal);
break;
case H264NalUnitTypes.SEQ_PARAMETER_SET:
if (fvnd != null) {
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, presentationTimeUs - currentPresentationTimeUs), false, false);
bufferedNals.clear();
fvnd = null;
}
handleSPS(nal);
break;
case H264NalUnitTypes.PIC_PARAMETER_SET:
if (fvnd != null) {
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, presentationTimeUs - currentPresentationTimeUs), false, false);
bufferedNals.clear();
fvnd = null;
}
handlePPS(nal);
break;
case H264NalUnitTypes.END_OF_SEQUENCE:
case H264NalUnitTypes.END_OF_STREAM:
return;
case H264NalUnitTypes.SEQ_PARAMETER_SET_EXT:
throw new IOException("Sequence parameter set extension is not yet handled. Needs TLC.");
default:
Log.w(TAG, "Unknown NAL unit type: " + nalUnitHeader.nal_unit_type);
}
}
void consumeLastNal() throws IOException {
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, 0), true, true);
}
private void pushSample(final StreamingSample ss, final boolean all, final boolean force) throws IOException {
if (ss != null) {
decFrameBuffer.add(ss);
}
if (all) {
while (decFrameBuffer.size() > 0) {
pushSample(null, false, true);
}
} else {
if ((decFrameBuffer.size() - 1 > maxDecFrameBuffering) || force) {
final StreamingSample first = decFrameBuffer.remove(0);
final PictureOrderCountType0SampleExtension poct0se = first.getSampleExtension(PictureOrderCountType0SampleExtension.class);
if (poct0se == null) {
sampleSink.acceptSample(first, this);
} else {
int delay = 0;
for (StreamingSample streamingSample : decFrameBuffer) {
if (poct0se.getPoc() > streamingSample.getSampleExtension(PictureOrderCountType0SampleExtension.class).getPoc()) {
delay++;
}
}
for (StreamingSample streamingSample : decFrameBuffer2) {
if (poct0se.getPoc() < streamingSample.getSampleExtension(PictureOrderCountType0SampleExtension.class).getPoc()) {
delay--;
}
}
decFrameBuffer2.add(first);
if (decFrameBuffer2.size() > maxDecFrameBuffering) {
decFrameBuffer2.remove(0).removeSampleExtension(PictureOrderCountType0SampleExtension.class);
}
first.addSampleExtension(CompositionTimeSampleExtension.create(delay * frametick));
sampleSink.acceptSample(first, this);
}
}
}
}
private SampleFlagsSampleExtension createSampleFlagsSampleExtension(H264NalUnitHeader nu, SliceHeader sliceHeader) {
final SampleFlagsSampleExtension sampleFlagsSampleExtension = new SampleFlagsSampleExtension();
if (nu.nal_ref_idc == 0) {
sampleFlagsSampleExtension.setSampleIsDependedOn(2);
} else {
sampleFlagsSampleExtension.setSampleIsDependedOn(1);
}
if ((sliceHeader.slice_type == SliceHeader.SliceType.I) || (sliceHeader.slice_type == SliceHeader.SliceType.SI)) {
sampleFlagsSampleExtension.setSampleDependsOn(2);
} else {
sampleFlagsSampleExtension.setSampleDependsOn(1);
}
sampleFlagsSampleExtension.setSampleIsNonSyncSample(H264NalUnitTypes.CODED_SLICE_IDR != nu.nal_unit_type);
return sampleFlagsSampleExtension;
}
private PictureOrderCountType0SampleExtension createPictureOrderCountType0SampleExtension(SliceHeader sliceHeader) {
if (sliceHeader.sps.pic_order_cnt_type == 0) {
return new PictureOrderCountType0SampleExtension(
sliceHeader, decFrameBuffer.size() > 0 ?
decFrameBuffer.get(decFrameBuffer.size() - 1).getSampleExtension(PictureOrderCountType0SampleExtension.class) :
null);
/* decFrameBuffer.add(ssi);
if (decFrameBuffer.size() - 1 > maxDecFrameBuffering) { // just added one
drainDecPictureBuffer(false);
}*/
} else if (sliceHeader.sps.pic_order_cnt_type == 1) {
throw new MuxingException("pic_order_cnt_type == 1 needs to be implemented");
} else if (sliceHeader.sps.pic_order_cnt_type == 2) {
return null; // no ctts
}
throw new MuxingException("I don't know sliceHeader.sps.pic_order_cnt_type of " + sliceHeader.sps.pic_order_cnt_type);
}
private StreamingSample createSample(List<ByteBuffer> nals, SliceHeader sliceHeader, H264NalUnitHeader nu, long sampleDurationNs) {
final long sampleDuration = getTimescale() * Math.max(0, sampleDurationNs) / 1000000L;
final StreamingSample ss = new StreamingSampleImpl(nals, sampleDuration);
ss.addSampleExtension(createSampleFlagsSampleExtension(nu, sliceHeader));
final SampleExtension pictureOrderCountType0SampleExtension = createPictureOrderCountType0SampleExtension(sliceHeader);
if (pictureOrderCountType0SampleExtension != null) {
ss.addSampleExtension(pictureOrderCountType0SampleExtension);
}
return ss;
}
private void handlePPS(final @NonNull ByteBuffer nal) {
nal.position(1);
try {
final PictureParameterSet _pictureParameterSet = PictureParameterSet.read(nal);
final ByteBuffer oldPpsSameId = ppsIdToPpsBytes.get(_pictureParameterSet.pic_parameter_set_id);
if (oldPpsSameId != null && !oldPpsSameId.equals(nal)) {
throw new MuxingException("OMG - I got two SPS with same ID but different settings! (AVC3 is the solution)");
} else {
ppsIdToPpsBytes.put(_pictureParameterSet.pic_parameter_set_id, nal);
ppsIdToPps.put(_pictureParameterSet.pic_parameter_set_id, _pictureParameterSet);
}
} catch (IOException e) {
throw new MuxingException("That's surprising to get IOException when working on ByteArrayInputStream", e);
}
}
private @NonNull SeqParameterSet handleSPS(final @NonNull ByteBuffer nal) {
nal.position(1);
try {
final SeqParameterSet seqParameterSet = SeqParameterSet.read(nal);
final ByteBuffer oldSpsSameId = spsIdToSpsBytes.get(seqParameterSet.seq_parameter_set_id);
if (oldSpsSameId != null && !oldSpsSameId.equals(nal)) {
throw new MuxingException("OMG - I got two SPS with same ID but different settings!");
} else {
spsIdToSpsBytes.put(seqParameterSet.seq_parameter_set_id, nal);
spsIdToSps.put(seqParameterSet.seq_parameter_set_id, seqParameterSet);
}
return seqParameterSet;
} catch (IOException e) {
throw new MuxingException("That's surprising to get IOException when working on ByteArrayInputStream", e);
}
}
class FirstVclNalDetector {
final SliceHeader sliceHeader;
final int frame_num;
final int pic_parameter_set_id;
final boolean field_pic_flag;
final boolean bottom_field_flag;
final int nal_ref_idc;
final int pic_order_cnt_type;
final int delta_pic_order_cnt_bottom;
final int pic_order_cnt_lsb;
final int delta_pic_order_cnt_0;
final int delta_pic_order_cnt_1;
final int idr_pic_id;
FirstVclNalDetector(ByteBuffer nal, int nal_ref_idc, int nal_unit_type) {
SliceHeader sh = new SliceHeader(nal, spsIdToSps, ppsIdToPps, nal_unit_type == 5);
this.sliceHeader = sh;
this.frame_num = sh.frame_num;
this.pic_parameter_set_id = sh.pic_parameter_set_id;
this.field_pic_flag = sh.field_pic_flag;
this.bottom_field_flag = sh.bottom_field_flag;
this.nal_ref_idc = nal_ref_idc;
this.pic_order_cnt_type = spsIdToSps.get(ppsIdToPps.get(sh.pic_parameter_set_id).seq_parameter_set_id).pic_order_cnt_type;
this.delta_pic_order_cnt_bottom = sh.delta_pic_order_cnt_bottom;
this.pic_order_cnt_lsb = sh.pic_order_cnt_lsb;
this.delta_pic_order_cnt_0 = sh.delta_pic_order_cnt_0;
this.delta_pic_order_cnt_1 = sh.delta_pic_order_cnt_1;
this.idr_pic_id = sh.idr_pic_id;
}
boolean isFirstInNew(FirstVclNalDetector nu) {
if (nu.frame_num != frame_num) {
return true;
}
if (nu.pic_parameter_set_id != pic_parameter_set_id) {
return true;
}
if (nu.field_pic_flag != field_pic_flag) {
return true;
}
if (nu.field_pic_flag) {
if (nu.bottom_field_flag != bottom_field_flag) {
return true;
}
}
if (nu.nal_ref_idc != nal_ref_idc) {
return true;
}
if (nu.pic_order_cnt_type == 0 && pic_order_cnt_type == 0) {
if (nu.pic_order_cnt_lsb != pic_order_cnt_lsb) {
return true;
}
if (nu.delta_pic_order_cnt_bottom != delta_pic_order_cnt_bottom) {
return true;
}
}
if (nu.pic_order_cnt_type == 1 && pic_order_cnt_type == 1) {
if (nu.delta_pic_order_cnt_0 != delta_pic_order_cnt_0) {
return true;
}
if (nu.delta_pic_order_cnt_1 != delta_pic_order_cnt_1) {
return true;
}
}
return false;
}
}
static class PictureOrderCountType0SampleExtension implements SampleExtension {
int picOrderCntMsb;
int picOrderCountLsb;
PictureOrderCountType0SampleExtension(final @NonNull SliceHeader currentSlice, final @Nullable PictureOrderCountType0SampleExtension previous) {
int prevPicOrderCntLsb = 0;
int prevPicOrderCntMsb = 0;
if (previous != null) {
prevPicOrderCntLsb = previous.picOrderCountLsb;
prevPicOrderCntMsb = previous.picOrderCntMsb;
}
final int maxPicOrderCountLsb = (1 << (currentSlice.sps.log2_max_pic_order_cnt_lsb_minus4 + 4));
// System.out.print(" pic_order_cnt_lsb " + pic_order_cnt_lsb + " " + max_pic_order_count);
picOrderCountLsb = currentSlice.pic_order_cnt_lsb;
picOrderCntMsb = 0;
if ((picOrderCountLsb < prevPicOrderCntLsb) && ((prevPicOrderCntLsb - picOrderCountLsb) >= (maxPicOrderCountLsb / 2))) {
picOrderCntMsb = prevPicOrderCntMsb + maxPicOrderCountLsb;
} else if ((picOrderCountLsb > prevPicOrderCntLsb) && ((picOrderCountLsb - prevPicOrderCntLsb) > (maxPicOrderCountLsb / 2))) {
picOrderCntMsb = prevPicOrderCntMsb - maxPicOrderCountLsb;
} else {
picOrderCntMsb = prevPicOrderCntMsb;
}
}
int getPoc() {
return picOrderCntMsb + picOrderCountLsb;
}
@NonNull
@Override
public String toString() {
return "picOrderCntMsb=" + picOrderCntMsb + ", picOrderCountLsb=" + picOrderCountLsb;
}
}
}
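
The composition-time logic above hinges on PictureOrderCountType0SampleExtension reconstructing a monotonically increasing picture order count from the wrapped pic_order_cnt_lsb values (H.264 picture order count type 0). Below is a standalone restatement of that msb update with two worked values; it is an editor's illustration, and PocSketch/nextMsb are not part of the commit.

final class PocSketch {

  // Mirrors the msb update in AvcTrack.PictureOrderCountType0SampleExtension above.
  static int nextMsb(int prevMsb, int prevLsb, int lsb, int log2MaxLsbMinus4) {
    int maxLsb = 1 << (log2MaxLsbMinus4 + 4);
    if (lsb < prevLsb && (prevLsb - lsb) >= maxLsb / 2) {
      return prevMsb + maxLsb;   // lsb wrapped forward
    } else if (lsb > prevLsb && (lsb - prevLsb) > maxLsb / 2) {
      return prevMsb - maxLsb;   // lsb wrapped backward
    }
    return prevMsb;
  }

  public static void main(String[] args) {
    // With log2_max_pic_order_cnt_lsb_minus4 = 0, lsb counts modulo 16.
    // Going from lsb 14 to lsb 2 is a forward wrap, so msb advances by 16:
    System.out.println(nextMsb(0, 14, 2, 0));  // 16 -> POC = 16 + 2 = 18
    // A small backward step (14 -> 12) is not a wrap:
    System.out.println(nextMsb(0, 14, 12, 0)); // 0
  }
}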

View file

@ -0,0 +1,99 @@
/*
* Copyright 2008-2019 JCodecProject
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer. Redistributions in binary form
* must reproduce the above copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other materials provided with
* the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* https://github.com/jcodec/jcodec/blob/master/src/main/java/org/jcodec/codecs/h264/H264Utils.java
*
* This file has been modified by Signal.
*/
package org.thoughtcrime.securesms.video.videoconverter.muxer;
import androidx.annotation.NonNull;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
final class H264Utils {
private H264Utils() {}
static @NonNull List<ByteBuffer> getNals(ByteBuffer buffer) {
final List<ByteBuffer> nals = new ArrayList<>();
ByteBuffer nal;
while ((nal = nextNALUnit(buffer)) != null) {
nals.add(nal);
}
return nals;
}
static ByteBuffer nextNALUnit(ByteBuffer buf) {
skipToNALUnit(buf);
return gotoNALUnit(buf);
}
static void skipToNALUnit(ByteBuffer buf) {
if (!buf.hasRemaining())
return;
int val = 0xffffffff;
while (buf.hasRemaining()) {
val <<= 8;
val |= (buf.get() & 0xff);
if ((val & 0xffffff) == 1) {
buf.position(buf.position());
break;
}
}
}
/**
* Finds next Nth H.264 bitstream NAL unit (0x00000001) and returns the data
* that precedes it as a ByteBuffer slice
* <p>
* Segment byte order is always little endian
* <p>
* TODO: emulation prevention
*/
static ByteBuffer gotoNALUnit(ByteBuffer buf) {
if (!buf.hasRemaining())
return null;
int from = buf.position();
ByteBuffer result = buf.slice();
result.order(ByteOrder.BIG_ENDIAN);
int val = 0xffffffff;
while (buf.hasRemaining()) {
val <<= 8;
val |= (buf.get() & 0xff);
if ((val & 0xffffff) == 1) {
buf.position(buf.position() - (val == 1 ? 4 : 3));
result.limit(buf.position() - from);
break;
}
}
return result;
}
}
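
Since H264Utils and getNals() are package-private, the sketch below would live in the same muxer package; it is an editor's illustration (the annexB bytes are made up) of how the start-code scanner above splits an Annex-B buffer into NAL-unit slices.

package org.thoughtcrime.securesms.video.videoconverter.muxer;

import java.nio.ByteBuffer;
import java.util.List;

final class NalSplitSketch {
  public static void main(String[] args) {
    // Two NAL units, each preceded by a four-byte 0x00000001 start code.
    byte[] annexB = {
        0, 0, 0, 1, 0x67, 0x42, 0x00, 0x1f,              // SPS-like payload (nal_unit_type 7)
        0, 0, 0, 1, 0x68, (byte) 0xce, 0x38, (byte) 0x80 // PPS-like payload (nal_unit_type 8)
    };

    List<ByteBuffer> nals = H264Utils.getNals(ByteBuffer.wrap(annexB));

    for (ByteBuffer nal : nals) {
      // Prints: 4-byte NAL unit, type 7 / 4-byte NAL unit, type 8
      System.out.println(nal.remaining() + "-byte NAL unit, type " + (nal.get(0) & 0x1f));
    }
  }
}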

View file

@ -0,0 +1,261 @@
package org.thoughtcrime.securesms.video.videoconverter.muxer;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.mp4parser.boxes.iso14496.part12.SampleDescriptionBox;
import org.mp4parser.boxes.iso14496.part15.HevcConfigurationBox;
import org.mp4parser.boxes.iso14496.part15.HevcDecoderConfigurationRecord;
import org.mp4parser.boxes.sampleentry.VisualSampleEntry;
import org.mp4parser.muxer.tracks.CleanInputStream;
import org.mp4parser.muxer.tracks.h265.H265NalUnitHeader;
import org.mp4parser.muxer.tracks.h265.H265NalUnitTypes;
import org.mp4parser.muxer.tracks.h265.SequenceParameterSetRbsp;
import org.mp4parser.streaming.StreamingSample;
import org.mp4parser.streaming.extensions.DimensionTrackExtension;
import org.mp4parser.streaming.extensions.SampleFlagsSampleExtension;
import org.mp4parser.streaming.input.AbstractStreamingTrack;
import org.mp4parser.streaming.input.StreamingSampleImpl;
import org.mp4parser.tools.ByteBufferByteChannel;
import org.mp4parser.tools.IsoTypeReader;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
abstract class HevcTrack extends AbstractStreamingTrack implements H265NalUnitTypes {
private final ArrayList<ByteBuffer> bufferedNals = new ArrayList<>();
private boolean vclNalUnitSeenInAU;
private boolean isIdr = true;
private long currentPresentationTimeUs;
private final SampleDescriptionBox stsd;
HevcTrack(final @NonNull List<ByteBuffer> csd) throws IOException {
final ArrayList<ByteBuffer> sps = new ArrayList<>();
final ArrayList<ByteBuffer> pps = new ArrayList<>();
final ArrayList<ByteBuffer> vps = new ArrayList<>();
SequenceParameterSetRbsp spsStruct = null;
for (ByteBuffer nal : csd) {
final H265NalUnitHeader unitHeader = getNalUnitHeader(nal);
nal.position(0);
// collect sps/vps/pps
switch (unitHeader.nalUnitType) {
case NAL_TYPE_PPS_NUT:
pps.add(nal.duplicate());
break;
case NAL_TYPE_VPS_NUT:
vps.add(nal.duplicate());
break;
case NAL_TYPE_SPS_NUT:
sps.add(nal.duplicate());
nal.position(2);
spsStruct = new SequenceParameterSetRbsp(new CleanInputStream(Channels.newInputStream(new ByteBufferByteChannel(nal.slice()))));
break;
case NAL_TYPE_PREFIX_SEI_NUT:
//new SEIMessage(new BitReaderBuffer(nal.slice()));
break;
}
}
stsd = new SampleDescriptionBox();
stsd.addBox(createSampleEntry(sps, pps, vps, spsStruct));
}
@Override
public long getTimescale() {
return 90000;
}
@Override
public String getHandler() {
return "vide";
}
@Override
public String getLanguage() {
return "\u0060\u0060\u0060"; // 0 in Iso639
}
@Override
public SampleDescriptionBox getSampleDescriptionBox() {
return stsd;
}
@Override
public void close() {
}
void consumeLastNal() throws IOException {
wrapUp(bufferedNals, currentPresentationTimeUs);
}
void consumeNal(final @NonNull ByteBuffer nal, final long presentationTimeUs) throws IOException {
final H265NalUnitHeader unitHeader = getNalUnitHeader(nal);
final boolean isVcl = isVcl(unitHeader);
//
if (vclNalUnitSeenInAU) { // we need at least 1 VCL per AU
// This branch checks if we encountered the start of a samples/AU
if (isVcl) {
if ((nal.get(2) & -128) != 0) { // this is: first_slice_segment_in_pic_flag u(1)
wrapUp(bufferedNals, presentationTimeUs);
}
} else {
switch (unitHeader.nalUnitType) {
case NAL_TYPE_PREFIX_SEI_NUT:
case NAL_TYPE_AUD_NUT:
case NAL_TYPE_PPS_NUT:
case NAL_TYPE_VPS_NUT:
case NAL_TYPE_SPS_NUT:
case NAL_TYPE_RSV_NVCL41:
case NAL_TYPE_RSV_NVCL42:
case NAL_TYPE_RSV_NVCL43:
case NAL_TYPE_RSV_NVCL44:
case NAL_TYPE_UNSPEC48:
case NAL_TYPE_UNSPEC49:
case NAL_TYPE_UNSPEC50:
case NAL_TYPE_UNSPEC51:
case NAL_TYPE_UNSPEC52:
case NAL_TYPE_UNSPEC53:
case NAL_TYPE_UNSPEC54:
case NAL_TYPE_UNSPEC55:
case NAL_TYPE_EOB_NUT: // a bit special but also causes a sample to be formed
case NAL_TYPE_EOS_NUT:
wrapUp(bufferedNals, presentationTimeUs);
break;
}
}
}
switch (unitHeader.nalUnitType) {
case NAL_TYPE_SPS_NUT:
case NAL_TYPE_VPS_NUT:
case NAL_TYPE_PPS_NUT:
case NAL_TYPE_EOB_NUT:
case NAL_TYPE_EOS_NUT:
case NAL_TYPE_AUD_NUT:
case NAL_TYPE_FD_NUT:
// ignore these
break;
default:
bufferedNals.add(nal);
break;
}
if (isVcl) {
isIdr = unitHeader.nalUnitType == NAL_TYPE_IDR_W_RADL || unitHeader.nalUnitType == NAL_TYPE_IDR_N_LP;
vclNalUnitSeenInAU = true;
}
}
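// For example, if the encoder emits [SEI][IDR slice][P slice][P slice]..., the NALs buffered for the
// previous access unit are wrapped into a sample as soon as the next slice with
// first_slice_segment_in_pic_flag == 1 (or a leading non-VCL NAL such as SEI or AUD) arrives, so each
// MP4 sample corresponds to exactly one coded picture.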
private void wrapUp(final @NonNull List<ByteBuffer> nals, final long presentationTimeUs) throws IOException {
final long duration = presentationTimeUs - currentPresentationTimeUs;
currentPresentationTimeUs = presentationTimeUs;
final StreamingSample sample = new StreamingSampleImpl(
nals, getTimescale() * Math.max(0, duration) / 1000000L);
final SampleFlagsSampleExtension sampleFlagsSampleExtension = new SampleFlagsSampleExtension();
sampleFlagsSampleExtension.setSampleIsNonSyncSample(!isIdr);
sample.addSampleExtension(sampleFlagsSampleExtension);
sampleSink.acceptSample(sample, this);
vclNalUnitSeenInAU = false;
isIdr = true;
nals.clear();
}
private static @NonNull H265NalUnitHeader getNalUnitHeader(final @NonNull ByteBuffer nal) {
nal.position(0);
final int nalUnitHeaderValue = IsoTypeReader.readUInt16(nal);
final H265NalUnitHeader nalUnitHeader = new H265NalUnitHeader();
nalUnitHeader.forbiddenZeroFlag = (nalUnitHeaderValue & 0x8000) >> 15;
nalUnitHeader.nalUnitType = (nalUnitHeaderValue & 0x7E00) >> 9;
nalUnitHeader.nuhLayerId = (nalUnitHeaderValue & 0x1F8) >> 3;
nalUnitHeader.nuhTemporalIdPlusOne = (nalUnitHeaderValue & 0x7);
return nalUnitHeader;
}
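// Example: the header value 0x4001 (the usual first two bytes of a VPS NAL unit) decodes to
// forbidden_zero_flag = 0, nal_unit_type = 32 (NAL_TYPE_VPS_NUT), nuh_layer_id = 0 and
// nuh_temporal_id_plus1 = 1.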
private @NonNull VisualSampleEntry createSampleEntry(
final @NonNull ArrayList<ByteBuffer> sps,
final @NonNull ArrayList<ByteBuffer> pps,
final @NonNull ArrayList<ByteBuffer> vps,
final @Nullable SequenceParameterSetRbsp spsStruct)
{
final VisualSampleEntry visualSampleEntry = new VisualSampleEntry("hvc1");
visualSampleEntry.setDataReferenceIndex(1);
visualSampleEntry.setDepth(24);
visualSampleEntry.setFrameCount(1);
visualSampleEntry.setHorizresolution(72);
visualSampleEntry.setVertresolution(72);
visualSampleEntry.setCompressorname("HEVC Coding");
final HevcConfigurationBox hevcConfigurationBox = new HevcConfigurationBox();
hevcConfigurationBox.getHevcDecoderConfigurationRecord().setConfigurationVersion(1);
if (spsStruct != null) {
visualSampleEntry.setWidth(spsStruct.pic_width_in_luma_samples);
visualSampleEntry.setHeight(spsStruct.pic_height_in_luma_samples);
final DimensionTrackExtension dte = this.getTrackExtension(DimensionTrackExtension.class);
if (dte == null) {
this.addTrackExtension(new DimensionTrackExtension(spsStruct.pic_width_in_luma_samples, spsStruct.pic_height_in_luma_samples));
}
final HevcDecoderConfigurationRecord hevcDecoderConfigurationRecord = hevcConfigurationBox.getHevcDecoderConfigurationRecord();
hevcDecoderConfigurationRecord.setChromaFormat(spsStruct.chroma_format_idc);
hevcDecoderConfigurationRecord.setGeneral_profile_idc(spsStruct.general_profile_idc);
hevcDecoderConfigurationRecord.setGeneral_profile_compatibility_flags(spsStruct.general_profile_compatibility_flags);
hevcDecoderConfigurationRecord.setGeneral_constraint_indicator_flags(spsStruct.general_constraint_indicator_flags);
hevcDecoderConfigurationRecord.setGeneral_level_idc(spsStruct.general_level_idc);
hevcDecoderConfigurationRecord.setGeneral_tier_flag(spsStruct.general_tier_flag);
hevcDecoderConfigurationRecord.setGeneral_profile_space(spsStruct.general_profile_space);
hevcDecoderConfigurationRecord.setBitDepthChromaMinus8(spsStruct.bit_depth_chroma_minus8);
hevcDecoderConfigurationRecord.setBitDepthLumaMinus8(spsStruct.bit_depth_luma_minus8);
hevcDecoderConfigurationRecord.setTemporalIdNested(spsStruct.sps_temporal_id_nesting_flag);
}
hevcConfigurationBox.getHevcDecoderConfigurationRecord().setLengthSizeMinusOne(3);
final HevcDecoderConfigurationRecord.Array vpsArray = new HevcDecoderConfigurationRecord.Array();
vpsArray.array_completeness = false;
vpsArray.nal_unit_type = NAL_TYPE_VPS_NUT;
vpsArray.nalUnits = new ArrayList<>();
for (ByteBuffer vp : vps) {
vpsArray.nalUnits.add(Utils.toArray(vp));
}
final HevcDecoderConfigurationRecord.Array spsArray = new HevcDecoderConfigurationRecord.Array();
spsArray.array_completeness = false;
spsArray.nal_unit_type = NAL_TYPE_SPS_NUT;
spsArray.nalUnits = new ArrayList<>();
for (ByteBuffer sp : sps) {
spsArray.nalUnits.add(Utils.toArray(sp));
}
final HevcDecoderConfigurationRecord.Array ppsArray = new HevcDecoderConfigurationRecord.Array();
ppsArray.array_completeness = false;
ppsArray.nal_unit_type = NAL_TYPE_PPS_NUT;
ppsArray.nalUnits = new ArrayList<>();
for (ByteBuffer pp : pps) {
ppsArray.nalUnits.add(Utils.toArray(pp));
}
hevcConfigurationBox.getArrays().addAll(Arrays.asList(spsArray, vpsArray, ppsArray));
visualSampleEntry.addBox(hevcConfigurationBox);
return visualSampleEntry;
}
private boolean isVcl(final @NonNull H265NalUnitHeader nalUnitHeader) {
return nalUnitHeader.nalUnitType >= 0 && nalUnitHeader.nalUnitType <= 31;
}
}

View file

@ -0,0 +1,424 @@
/*
* Copyright (C) https://github.com/sannies/mp4parser/blob/master/LICENSE
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* https://github.com/sannies/mp4parser/blob/4ed724754cde751c3f27fdda51f288df4f4c5db5/streaming/src/main/java/org/mp4parser/streaming/output/mp4/StandardMp4Writer.java
*
* This file has been modified by Signal.
*/
package org.thoughtcrime.securesms.video.videoconverter.muxer;
import androidx.annotation.NonNull;
import org.mp4parser.Box;
import org.mp4parser.boxes.iso14496.part12.ChunkOffsetBox;
import org.mp4parser.boxes.iso14496.part12.CompositionTimeToSample;
import org.mp4parser.boxes.iso14496.part12.FileTypeBox;
import org.mp4parser.boxes.iso14496.part12.MediaHeaderBox;
import org.mp4parser.boxes.iso14496.part12.MovieBox;
import org.mp4parser.boxes.iso14496.part12.MovieHeaderBox;
import org.mp4parser.boxes.iso14496.part12.SampleSizeBox;
import org.mp4parser.boxes.iso14496.part12.SampleTableBox;
import org.mp4parser.boxes.iso14496.part12.SampleToChunkBox;
import org.mp4parser.boxes.iso14496.part12.SyncSampleBox;
import org.mp4parser.boxes.iso14496.part12.TimeToSampleBox;
import org.mp4parser.boxes.iso14496.part12.TrackBox;
import org.mp4parser.boxes.iso14496.part12.TrackHeaderBox;
import org.mp4parser.streaming.StreamingSample;
import org.mp4parser.streaming.StreamingTrack;
import org.mp4parser.streaming.extensions.CompositionTimeSampleExtension;
import org.mp4parser.streaming.extensions.CompositionTimeTrackExtension;
import org.mp4parser.streaming.extensions.SampleFlagsSampleExtension;
import org.mp4parser.streaming.extensions.TrackIdTrackExtension;
import org.mp4parser.streaming.output.SampleSink;
import org.mp4parser.streaming.output.mp4.DefaultBoxes;
import org.mp4parser.tools.Mp4Arrays;
import org.mp4parser.tools.Mp4Math;
import org.mp4parser.tools.Path;
import org.signal.core.util.logging.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import static org.mp4parser.tools.CastUtils.l2i;
/**
* Creates an MP4 file with ftyp, mdat+, moov order.
* A very special property of this variant is that it is written sequentially: you can start transferring the
* data while the <code>sink</code> is still receiving it (in contrast to typical implementations, which need
* random access to write length fields at the beginning of the file).
*/
final class Mp4Writer extends DefaultBoxes implements SampleSink {
private static final String TAG = "Mp4Writer";
private final WritableByteChannel sink;
private final List<StreamingTrack> source;
private final Date creationTime = new Date();
/**
* Contains the start time of the next segment in line that will be created.
*/
private final Map<StreamingTrack, Long> nextChunkCreateStartTime = new ConcurrentHashMap<>();
/**
* Contains the start time of the next segment in line that will be written.
*/
private final Map<StreamingTrack, Long> nextChunkWriteStartTime = new ConcurrentHashMap<>();
/**
* Contains the next sample's start time.
*/
private final Map<StreamingTrack, Long> nextSampleStartTime = new HashMap<>();
/**
* Buffers the samples per track until there are enough samples to form a Segment.
*/
private final Map<StreamingTrack, List<StreamingSample>> sampleBuffers = new HashMap<>();
private final Map<StreamingTrack, TrackBox> trackBoxes = new HashMap<>();
/**
* Buffers segments until it's time for a segment to be written.
*/
private final Map<StreamingTrack, Queue<ChunkContainer>> chunkBuffers = new ConcurrentHashMap<>();
private final Map<StreamingTrack, Long> chunkNumbers = new HashMap<>();
private final Map<StreamingTrack, Long> sampleNumbers = new HashMap<>();
private long bytesWritten = 0;
Mp4Writer(final @NonNull List<StreamingTrack> source, final @NonNull WritableByteChannel sink) throws IOException {
this.source = new ArrayList<>(source);
this.sink = sink;
final HashSet<Long> trackIds = new HashSet<>();
for (StreamingTrack streamingTrack : source) {
streamingTrack.setSampleSink(this);
chunkNumbers.put(streamingTrack, 1L);
sampleNumbers.put(streamingTrack, 1L);
nextSampleStartTime.put(streamingTrack, 0L);
nextChunkCreateStartTime.put(streamingTrack, 0L);
nextChunkWriteStartTime.put(streamingTrack, 0L);
sampleBuffers.put(streamingTrack, new ArrayList<>());
chunkBuffers.put(streamingTrack, new LinkedList<>());
if (streamingTrack.getTrackExtension(TrackIdTrackExtension.class) != null) {
final TrackIdTrackExtension trackIdTrackExtension = streamingTrack.getTrackExtension(TrackIdTrackExtension.class);
if (trackIds.contains(trackIdTrackExtension.getTrackId())) {
throw new MuxingException("There may not be two tracks with the same trackID within one file");
}
trackIds.add(trackIdTrackExtension.getTrackId());
}
}
for (StreamingTrack streamingTrack : source) {
if (streamingTrack.getTrackExtension(TrackIdTrackExtension.class) == null) {
long maxTrackId = 0;
for (Long trackId : trackIds) {
maxTrackId = Math.max(trackId, maxTrackId);
}
final TrackIdTrackExtension tiExt = new TrackIdTrackExtension(maxTrackId + 1);
trackIds.add(tiExt.getTrackId());
streamingTrack.addTrackExtension(tiExt);
}
}
final List<String> minorBrands = new LinkedList<>();
minorBrands.add("isom");
minorBrands.add("mp42");
write(sink, new FileTypeBox("mp42", 0, minorBrands));
}
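// Illustrative flow (a sketch only; StreamingMuxer in this package does the real wiring):
//
//   WritableByteChannel channel = Channels.newChannel(outputStream);
//   Mp4Writer writer = new Mp4Writer(Arrays.asList(videoTrack, audioTrack), channel); // ftyp is written here
//   // ... each track then pushes its encoded samples into writer.acceptSample(sample, track) ...
//   writer.close(); // flushes the remaining chunks and writes the moov box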
public void close() throws IOException {
for (StreamingTrack streamingTrack : source) {
writeChunkContainer(createChunkContainer(streamingTrack));
streamingTrack.close();
}
write(sink, createMoov());
}
private Box createMoov() {
final MovieBox movieBox = new MovieBox();
final MovieHeaderBox mvhd = createMvhd();
movieBox.addBox(mvhd);
// update durations
for (StreamingTrack streamingTrack : source) {
final TrackBox tb = trackBoxes.get(streamingTrack);
final MediaHeaderBox mdhd = Path.getPath(tb, "mdia[0]/mdhd[0]");
mdhd.setCreationTime(creationTime);
mdhd.setModificationTime(creationTime);
mdhd.setDuration(Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)));
mdhd.setTimescale(streamingTrack.getTimescale());
mdhd.setLanguage(streamingTrack.getLanguage());
movieBox.addBox(tb);
final TrackHeaderBox tkhd = Path.getPath(tb, "tkhd[0]");
final double duration = (double) Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)) / streamingTrack.getTimescale();
tkhd.setDuration((long) (mvhd.getTimescale() * duration));
}
// metadata here
return movieBox;
}
private void sortTracks() {
Collections.sort(source, (o1, o2) -> {
// compare next chunk-write start times; cross-multiplying by the other track's timescale keeps the comparison exact across different timescales
final long a = Objects.requireNonNull(nextChunkWriteStartTime.get(o1)) * o2.getTimescale();
final long b = Objects.requireNonNull(nextChunkWriteStartTime.get(o2)) * o1.getTimescale();
return (int) Math.signum(a - b);
});
}
@Override
protected MovieHeaderBox createMvhd() {
final MovieHeaderBox mvhd = new MovieHeaderBox();
mvhd.setVersion(1);
mvhd.setCreationTime(creationTime);
mvhd.setModificationTime(creationTime);
long[] timescales = new long[0];
long maxTrackId = 0;
double duration = 0;
for (StreamingTrack streamingTrack : source) {
duration = Math.max((double) Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)) / streamingTrack.getTimescale(), duration);
timescales = Mp4Arrays.copyOfAndAppend(timescales, streamingTrack.getTimescale());
maxTrackId = Math.max(streamingTrack.getTrackExtension(TrackIdTrackExtension.class).getTrackId(), maxTrackId);
}
mvhd.setTimescale(Mp4Math.lcm(timescales));
mvhd.setDuration((long) (Mp4Math.lcm(timescales) * duration));
// find the next available trackId
mvhd.setNextTrackId(maxTrackId + 1);
return mvhd;
}
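// Example: with a 90 000 Hz video track and a 44 100 Hz audio track, the movie timescale becomes
// lcm(90 000, 44 100) = 4 410 000, so both tracks' durations convert to movie time without rounding.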
private void write(final @NonNull WritableByteChannel out, Box... boxes) throws IOException {
for (Box box1 : boxes) {
box1.getBox(out);
bytesWritten += box1.getSize();
}
}
/**
* Tests whether the samples received so far for the given track
* already form a 'chunk' as we want it. The next sample will not be
* part of that chunk; it will be added to the following chunk's buffer.
*
* @param streamingTrack track to test
* @param next the latest sample
* @return true if a chunk is to be created.
*/
private boolean isChunkReady(StreamingTrack streamingTrack, StreamingSample next) {
final long ts = Objects.requireNonNull(nextSampleStartTime.get(streamingTrack));
final long cfst = Objects.requireNonNull(nextChunkCreateStartTime.get(streamingTrack));
return (ts >= cfst + 2 * streamingTrack.getTimescale()); // chunk interleave of 2 seconds
}
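// e.g. for a track with a 90 000 Hz timescale, a chunk is cut once the buffered samples span
// at least 2 * 90 000 = 180 000 ticks of media time, regardless of how many samples that is.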
private void writeChunkContainer(ChunkContainer chunkContainer) throws IOException {
final TrackBox tb = trackBoxes.get(chunkContainer.streamingTrack);
final ChunkOffsetBox stco = Objects.requireNonNull(Path.getPath(tb, "mdia[0]/minf[0]/stbl[0]/stco[0]"));
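// the chunk's first sample starts right after the 8-byte mdat box header, hence the +8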
stco.setChunkOffsets(Mp4Arrays.copyOfAndAppend(stco.getChunkOffsets(), bytesWritten + 8));
write(sink, chunkContainer.mdat);
}
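// acceptSample below buffers samples per track; once a track has roughly two seconds buffered it cuts a
// chunk, and queued chunks are drained whenever the track that is furthest behind (kept at the head of
// 'source' by sortTracks) has one ready, so audio and video stay interleaved in the output file.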
public void acceptSample(
final @NonNull StreamingSample streamingSample,
final @NonNull StreamingTrack streamingTrack) throws IOException
{
TrackBox tb = trackBoxes.get(streamingTrack);
if (tb == null) {
tb = new TrackBox();
tb.addBox(createTkhd(streamingTrack));
tb.addBox(createMdia(streamingTrack));
trackBoxes.put(streamingTrack, tb);
}
if (isChunkReady(streamingTrack, streamingSample)) {
final ChunkContainer chunkContainer = createChunkContainer(streamingTrack);
//System.err.println("Creating fragment for " + streamingTrack);
Objects.requireNonNull(sampleBuffers.get(streamingTrack)).clear();
nextChunkCreateStartTime.put(streamingTrack, Objects.requireNonNull(nextChunkCreateStartTime.get(streamingTrack)) + chunkContainer.duration);
final Queue<ChunkContainer> chunkQueue = Objects.requireNonNull(chunkBuffers.get(streamingTrack));
chunkQueue.add(chunkContainer);
if (source.get(0) == streamingTrack) {
Queue<ChunkContainer> tracksFragmentQueue;
StreamingTrack currentStreamingTrack;
// This will write AT LEAST the currently created fragment and possibly a few more
while (!(tracksFragmentQueue = chunkBuffers.get((currentStreamingTrack = this.source.get(0)))).isEmpty()) {
final ChunkContainer currentFragmentContainer = tracksFragmentQueue.remove();
writeChunkContainer(currentFragmentContainer);
Log.d(TAG, "write chunk " + currentStreamingTrack.getHandler() + ". duration " + (double) currentFragmentContainer.duration / currentStreamingTrack.getTimescale());
final long ts = Objects.requireNonNull(nextChunkWriteStartTime.get(currentStreamingTrack)) + currentFragmentContainer.duration;
nextChunkWriteStartTime.put(currentStreamingTrack, ts);
Log.d(TAG, currentStreamingTrack.getHandler() + " track advanced to " + (double) ts / currentStreamingTrack.getTimescale());
sortTracks();
}
} else {
Log.d(TAG, streamingTrack.getHandler() + " track delayed, queue size is " + chunkQueue.size());
}
}
Objects.requireNonNull(sampleBuffers.get(streamingTrack)).add(streamingSample);
nextSampleStartTime.put(streamingTrack, Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)) + streamingSample.getDuration());
}
private ChunkContainer createChunkContainer(final @NonNull StreamingTrack streamingTrack) {
final List<StreamingSample> samples = Objects.requireNonNull(sampleBuffers.get(streamingTrack));
final long chunkNumber = Objects.requireNonNull(chunkNumbers.get(streamingTrack));
chunkNumbers.put(streamingTrack, chunkNumber + 1);
final ChunkContainer cc = new ChunkContainer();
cc.streamingTrack = streamingTrack;
cc.mdat = new Mdat(samples);
cc.duration = Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)) - Objects.requireNonNull(nextChunkCreateStartTime.get(streamingTrack));
final TrackBox tb = trackBoxes.get(streamingTrack);
final SampleTableBox stbl = Objects.requireNonNull(Path.getPath(tb, "mdia[0]/minf[0]/stbl[0]"));
final SampleToChunkBox stsc = Objects.requireNonNull(Path.getPath(stbl, "stsc[0]"));
if (stsc.getEntries().isEmpty()) {
final List<SampleToChunkBox.Entry> entries = new ArrayList<>();
stsc.setEntries(entries);
entries.add(new SampleToChunkBox.Entry(chunkNumber, samples.size(), 1));
} else {
final SampleToChunkBox.Entry e = stsc.getEntries().get(stsc.getEntries().size() - 1);
if (e.getSamplesPerChunk() != samples.size()) {
stsc.getEntries().add(new SampleToChunkBox.Entry(chunkNumber, samples.size(), 1));
}
}
long sampleNumber = Objects.requireNonNull(sampleNumbers.get(streamingTrack));
final SampleSizeBox stsz = Objects.requireNonNull(Path.getPath(stbl, "stsz[0]"));
final TimeToSampleBox stts = Objects.requireNonNull(Path.getPath(stbl, "stts[0]"));
SyncSampleBox stss = Path.getPath(stbl, "stss[0]");
CompositionTimeToSample ctts = Path.getPath(stbl, "ctts[0]");
if (streamingTrack.getTrackExtension(CompositionTimeTrackExtension.class) != null) {
if (ctts == null) {
ctts = new CompositionTimeToSample();
ctts.setEntries(new ArrayList<>());
final ArrayList<Box> bs = new ArrayList<>(stbl.getBoxes());
bs.add(bs.indexOf(stts), ctts);
stbl.setBoxes(bs); // attach the new ctts box; adding it only to the local copy would silently drop composition offsets
}
}
final long[] sampleSizes = new long[samples.size()];
int i = 0;
for (StreamingSample sample : samples) {
sampleSizes[i++] = sample.getContent().limit();
if (ctts != null) {
ctts.getEntries().add(new CompositionTimeToSample.Entry(1, l2i(sample.getSampleExtension(CompositionTimeSampleExtension.class).getCompositionTimeOffset())));
}
if (stts.getEntries().isEmpty()) {
final ArrayList<TimeToSampleBox.Entry> entries = new ArrayList<>(stts.getEntries());
entries.add(new TimeToSampleBox.Entry(1, sample.getDuration()));
stts.setEntries(entries);
} else {
final TimeToSampleBox.Entry sttsEntry = stts.getEntries().get(stts.getEntries().size() - 1);
if (sttsEntry.getDelta() == sample.getDuration()) {
sttsEntry.setCount(sttsEntry.getCount() + 1);
} else {
stts.getEntries().add(new TimeToSampleBox.Entry(1, sample.getDuration()));
}
}
final SampleFlagsSampleExtension sampleFlagsSampleExtension = sample.getSampleExtension(SampleFlagsSampleExtension.class);
if (sampleFlagsSampleExtension != null && sampleFlagsSampleExtension.isSyncSample()) {
if (stss == null) {
stss = new SyncSampleBox();
stbl.addBox(stss);
}
stss.setSampleNumber(Mp4Arrays.copyOfAndAppend(stss.getSampleNumber(), sampleNumber));
}
sampleNumber++;
}
stsz.setSampleSizes(Mp4Arrays.copyOfAndAppend(stsz.getSampleSizes(), sampleSizes));
sampleNumbers.put(streamingTrack, sampleNumber);
samples.clear();
Log.d(TAG, "chunk container created for " + streamingTrack.getHandler() + ". mdat size: " + cc.mdat.size + ". chunk duration is " + (double) cc.duration / streamingTrack.getTimescale());
return cc;
}
protected @NonNull Box createMdhd(final @NonNull StreamingTrack streamingTrack) {
final MediaHeaderBox mdhd = new MediaHeaderBox();
mdhd.setCreationTime(creationTime);
mdhd.setModificationTime(creationTime);
//mdhd.setDuration(nextSampleStartTime.get(streamingTrack)); will update at the end, in createMoov
mdhd.setTimescale(streamingTrack.getTimescale());
mdhd.setLanguage(streamingTrack.getLanguage());
return mdhd;
}
private class Mdat implements Box {
final ArrayList<StreamingSample> samples;
long size;
Mdat(final @NonNull List<StreamingSample> samples) {
this.samples = new ArrayList<>(samples);
size = 8;
for (StreamingSample sample : samples) {
size += sample.getContent().limit();
}
}
@Override
public String getType() {
return "mdat";
}
@Override
public long getSize() {
return size;
}
@Override
public void getBox(WritableByteChannel writableByteChannel) throws IOException {
writableByteChannel.write(ByteBuffer.wrap(new byte[]{
(byte) ((size & 0xff000000) >> 24),
(byte) ((size & 0xff0000) >> 16),
(byte) ((size & 0xff00) >> 8),
(byte) ((size & 0xff)),
109, 100, 97, 116, // mdat
}));
for (StreamingSample sample : samples) {
writableByteChannel.write((ByteBuffer) sample.getContent().rewind());
}
}
}
private class ChunkContainer {
Mdat mdat;
StreamingTrack streamingTrack;
long duration;
}
}

View file

@ -0,0 +1,12 @@
package org.thoughtcrime.securesms.video.videoconverter.muxer;
final class MuxingException extends RuntimeException {
public MuxingException(String message) {
super(message);
}
public MuxingException(String message, Throwable cause) {
super(message, cause);
}
}

View file

@ -0,0 +1,144 @@
package org.thoughtcrime.securesms.video.videoconverter.muxer;
import android.media.MediaCodec;
import android.media.MediaFormat;
import androidx.annotation.NonNull;
import org.mp4parser.streaming.StreamingTrack;
import org.thoughtcrime.securesms.video.videoconverter.Muxer;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.List;
public final class StreamingMuxer implements Muxer {
private final OutputStream outputStream;
private final List<MediaCodecTrack> tracks = new ArrayList<>();
private Mp4Writer mp4Writer;
public StreamingMuxer(OutputStream outputStream) {
this.outputStream = outputStream;
}
@Override
public void start() throws IOException {
final List<StreamingTrack> source = new ArrayList<>();
for (MediaCodecTrack track : tracks) {
source.add((StreamingTrack) track);
}
mp4Writer = new Mp4Writer(source, Channels.newChannel(outputStream));
}
@Override
public void stop() throws IOException {
if (mp4Writer == null) {
throw new IllegalStateException("calling stop prior to start");
}
for (MediaCodecTrack track : tracks) {
track.finish();
}
mp4Writer.close();
mp4Writer = null;
}
@Override
public int addTrack(@NonNull MediaFormat format) throws IOException {
final String mime = format.getString(MediaFormat.KEY_MIME);
switch (mime) {
case "video/avc":
tracks.add(new MediaCodecAvcTrack(format));
break;
case "audio/mp4a-latm":
tracks.add(new MediaCodecAacTrack(format));
break;
case "video/hevc":
tracks.add(new MediaCodecHevcTrack(format));
break;
default:
throw new IllegalArgumentException("unknown track format");
}
return tracks.size() - 1;
}
@Override
public void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException {
tracks.get(trackIndex).writeSampleData(byteBuf, bufferInfo);
}
@Override
public void release() {
}
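// Illustrative use (a sketch; the video transcoder drives these calls, roughly mirroring android.media.MediaMuxer):
//
//   Muxer muxer = new StreamingMuxer(outputStream);
//   int videoTrack = muxer.addTrack(videoEncoderOutputFormat);  // "video/avc" or "video/hevc"
//   int audioTrack = muxer.addTrack(audioEncoderOutputFormat);  // "audio/mp4a-latm"
//   muxer.start();
//   // for each encoded buffer: muxer.writeSampleData(trackIndex, buffer, bufferInfo);
//   muxer.stop();
//   muxer.release();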
interface MediaCodecTrack {
void writeSampleData(@NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException;
void finish() throws IOException;
}
static class MediaCodecAvcTrack extends AvcTrack implements MediaCodecTrack {
MediaCodecAvcTrack(@NonNull MediaFormat format) {
super(Utils.subBuffer(format.getByteBuffer("csd-0"), 4), Utils.subBuffer(format.getByteBuffer("csd-1"), 4));
}
@Override
public void writeSampleData(@NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException {
final List<ByteBuffer> nals = H264Utils.getNals(byteBuf);
for (ByteBuffer nal : nals) {
consumeNal(Utils.clone(nal), bufferInfo.presentationTimeUs);
}
}
@Override
public void finish() throws IOException {
consumeLastNal();
}
}
static class MediaCodecHevcTrack extends HevcTrack implements MediaCodecTrack {
MediaCodecHevcTrack(@NonNull MediaFormat format) throws IOException {
super(H264Utils.getNals(format.getByteBuffer("csd-0")));
}
@Override
public void writeSampleData(@NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException {
final List<ByteBuffer> nals = H264Utils.getNals(byteBuf);
for (ByteBuffer nal : nals) {
consumeNal(Utils.clone(nal), bufferInfo.presentationTimeUs);
}
}
@Override
public void finish() throws IOException {
consumeLastNal();
}
}
static class MediaCodecAacTrack extends AacTrack implements MediaCodecTrack {
MediaCodecAacTrack(@NonNull MediaFormat format) {
super(format.getInteger(MediaFormat.KEY_BIT_RATE), format.getInteger(MediaFormat.KEY_BIT_RATE),
format.getInteger(MediaFormat.KEY_SAMPLE_RATE), format.getInteger(MediaFormat.KEY_CHANNEL_COUNT),
format.getInteger(MediaFormat.KEY_AAC_PROFILE));
}
@Override
public void writeSampleData(@NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException {
final byte[] buffer = new byte[bufferInfo.size];
byteBuf.position(bufferInfo.offset);
byteBuf.get(buffer, 0, bufferInfo.size);
processSample(ByteBuffer.wrap(buffer));
}
@Override
public void finish() {
}
}
}

View file

@ -0,0 +1,44 @@
package org.thoughtcrime.securesms.video.videoconverter.muxer;
import androidx.annotation.NonNull;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
/**
* Based on https://github.com/jcodec/jcodec/blob/master/src/main/java/org/jcodec/codecs/h264/H264Utils.java
*/
final class Utils {
private Utils() {}
static byte[] toArray(final @NonNull ByteBuffer buf) {
final ByteBuffer newBuf = buf.duplicate();
byte[] bytes = new byte[newBuf.remaining()];
newBuf.get(bytes, 0, bytes.length);
return bytes;
}
public static ByteBuffer clone(final @NonNull ByteBuffer original) {
final ByteBuffer clone = ByteBuffer.allocate(original.capacity());
original.rewind();
clone.put(original);
original.rewind();
clone.flip();
return clone;
}
static @NonNull ByteBuffer subBuffer(final @NonNull ByteBuffer buf, final int start) {
return subBuffer(buf, start, buf.remaining() - start);
}
static @NonNull ByteBuffer subBuffer(final @NonNull ByteBuffer buf, final int start, final int count) {
final ByteBuffer newBuf = buf.duplicate();
byte[] bytes = new byte[count];
newBuf.position(start);
newBuf.get(bytes, 0, bytes.length);
return ByteBuffer.wrap(bytes);
}
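// Example: MediaCodec's AVC "csd-0"/"csd-1" buffers start with a 4-byte Annex B start code, so
// subBuffer(csd0, 4) strips it and yields the bare parameter set (00 00 00 01 67 ... -> 67 ...),
// which is how MediaCodecAvcTrack feeds the SPS/PPS to AvcTrack.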
}

View file

@ -0,0 +1,24 @@
// Auto-generated, use ./gradlew calculateChecksums to regenerate
dependencyVerification {
verify = [
['androidx.annotation:annotation:1.1.0',
'd38d63edb30f1467818d50aaf05f8a692dea8b31392a049bfa991b159ad5b692'],
['com.google.protobuf:protobuf-javalite:3.10.0',
'215a94dbe100130295906b531bb72a26965c7ac8fcd9a75bf8054a8ac2abf4b4'],
['org.mp4parser:isoparser:1.9.39',
'a3a7172648f1ac4b2a369ecca2861317e472179c842a5217b08643ba0a1dfa12'],
['org.mp4parser:muxer:1.9.39',
'4befe68d411cd889628b53bab211d395899a9ce893ae6766ec2f4fefec5b7835'],
['org.mp4parser:streaming:1.9.39',
'da5151cfc3bf491d550fb9127bba22736f4b7416058d58a1a5fcfdfa3673876d'],
['org.slf4j:slf4j-api:1.7.24',
'baf3c7fe15fefeaf9e5b000d94547379dc48370f22a8797e239c127e7d7756ec'],
]
}