Fix recording size not properly being reported and transferred between server and client

0xb00bface 2023-11-17 21:45:23 +01:00
parent f55a5fc644
commit 3d00b45c7d
6 changed files with 20 additions and 13 deletions
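At a high level: RecordingProcess previously declared downloadedBytes as an interface field, which in Java is implicitly static and final, so every download effectively shared one counter, and AbstractHlsDownload additionally re-added the running total into its own sizeInByte field on every finished segment, inflating the reported size. The counter now lives as an instance field on AbstractDownload, RecordingProcess only declares the getter, getSizeInByte() reads that counter directly, and RemoteRecorder copies sizeInByte and lastSizeUpdate from the server's DTO so the client shows the size reported by the server. Below is a minimal, simplified sketch of the corrected accounting; it is not the full classes from the repository, but the field and method names follow the diffs in this commit.

import java.util.concurrent.atomic.AtomicLong;

// Simplified sketch of the byte accounting after this commit:
// a single per-download counter, bumped once per finished segment.
abstract class DownloadAccountingSketch {

    // single source of truth for the number of bytes written so far
    protected final AtomicLong downloadedBytes = new AtomicLong();

    public AtomicLong getDownloadedBytes() {
        return downloadedBytes;
    }

    // the reported recording size is just the current counter value
    public long getSizeInByte() {
        return downloadedBytes.get();
    }

    // called once per downloaded segment; adds only that segment's size.
    // The previous code additionally added the running total into a second
    // sizeInByte counter, so the reported size grew much faster than the
    // actual download.
    protected void segmentDownloadFinished(long segmentSizeInBytes) {
        downloadedBytes.addAndGet(segmentSizeInBytes);
    }
}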

View File

@@ -226,7 +226,7 @@ public class Recording implements Serializable {
         } else {
             long now = System.currentTimeMillis();
             if (now - lastSizeUpdate > 2500) {
-                log.debug("full size update for {}", this);
+                log.trace("full size update for {}", this);
                 sizeInByte = getSize();
                 lastSizeUpdate = now;
             }

View File

@@ -397,7 +397,14 @@ public class RemoteRecorder implements Recorder {
         if (response.isSuccessful()) {
             RecordingListResponse resp = mapper.readValue(json, RecordingListResponse.class);
             if (resp.status.equals(SUCCESS)) {
-                List<Recording> newRecordings = resp.recordings.stream().map(Mappers.getMapper(RecordingMapper.class)::toRecording).collect(Collectors.toList());
+                List<Recording> newRecordings = resp.recordings.stream()
+                        .map(dto -> {
+                            var rec = Mappers.getMapper(RecordingMapper.class).toRecording(dto);
+                            rec.setSizeInByte(dto.getSizeInByte());
+                            rec.setLastSizeUpdate(Instant.now().toEpochMilli());
+                            return rec;
+                        })
+                        .collect(Collectors.toList());
                 // fire changed events
                 for (Recording recording : recordings) {
                     if (newRecordings.contains(recording)) {

View File

@@ -12,12 +12,14 @@ import java.time.Instant;
 import java.util.List;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicLong;
 
 import static ctbrec.recorder.download.StreamSource.UNKNOWN;
 
 @Slf4j
 public abstract class AbstractDownload implements RecordingProcess {
 
+    protected AtomicLong downloadedBytes = new AtomicLong();
     protected Instant startTime;
     protected Instant rescheduleTime = Instant.now();
     protected Model model = new UnknownModel();
@@ -112,4 +114,9 @@ public abstract class AbstractDownload implements RecordingProcess {
             }
         }
     }
+
+    @Override
+    public AtomicLong getDownloadedBytes() {
+        return downloadedBytes;
+    }
 }

View File

@@ -12,7 +12,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicLong;
 
 public interface RecordingProcess extends Callable<RecordingProcess> {
-    AtomicLong downloadedBytes = new AtomicLong();
 
     void init(Config config, Model model, Instant startTime, ExecutorService executorService) throws IOException;
@@ -62,8 +61,5 @@ public interface RecordingProcess extends Callable<RecordingProcess> {
     void awaitEnd();
 
-    default AtomicLong getDownloadedBytes() {
-        return downloadedBytes;
-    }
+    AtomicLong getDownloadedBytes();
 }

View File

@@ -44,7 +44,6 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorCompletionService;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
-import java.util.concurrent.atomic.AtomicLong;
 
 import static ctbrec.io.HttpConstants.*;
 import static java.nio.charset.StandardCharsets.UTF_8;
@@ -70,7 +69,6 @@ public abstract class AbstractHlsDownload extends AbstractDownload {
     private Instant beforeLastPlaylistRequest = Instant.EPOCH;
     private int consecutivePlaylistTimeouts = 0;
     private int consecutivePlaylistErrors = 0;
-    private final AtomicLong sizeInByte = new AtomicLong();
     protected Instant lastSegmentDownload = Instant.MIN;
     private final List<Instant> segmentErrorTimestamps = new LinkedList<>();
@@ -90,14 +88,13 @@ public abstract class AbstractHlsDownload extends AbstractDownload {
     @Override
     public long getSizeInByte() {
-        return sizeInByte.get();
+        return getDownloadedBytes().get();
     }
 
     protected abstract OutputStream getSegmentOutputStream(Segment segment) throws IOException;
 
     protected void segmentDownloadFinished(SegmentDownload segmentDownload) { // NOSONAR
-        long bytes = getDownloadedBytes().addAndGet(segmentDownload.size());
-        sizeInByte.addAndGet(bytes);
+        getDownloadedBytes().addAndGet(segmentDownload.size());
         lastSegmentDownload = Instant.now();
     }

View File

@@ -211,7 +211,7 @@ public class CamsodaModel extends AbstractModel {
         } else {
            try {
                 List<StreamSource> sources = getStreamSources();
-                LOG.debug("stream sources {}", sources);
+                LOG.debug("{}:{} stream sources {}", getSite().getName(), getName(), sources);
                 if (sources.isEmpty()) {
                     return new int[]{0, 0};
                 } else {