@@ -227589,19 +227589,24 @@ public:
             }
         }
-        imageSwapLock.enter();
-        const int lineStride = width * 3;
-        const Image::BitmapData destData (*loadingImage, 0, 0, width, height, true);
-        for (int i = 0; i < height; ++i)
-            memcpy (destData.getLinePointer ((height - 1) - i),
-                    buffer + lineStride * i,
-                    lineStride);
-        imageNeedsFlipping = true;
-        imageSwapLock.exit();
-        callListeners (*loadingImage);
+        {
+            const int lineStride = width * 3;
+            const ScopedLock sl (imageSwapLock);
+            {
+                const Image::BitmapData destData (*loadingImage, 0, 0, width, height, true);
+                for (int i = 0; i < height; ++i)
+                    memcpy (destData.getLinePointer ((height - 1) - i),
+                            buffer + lineStride * i,
+                            lineStride);
+            }
+            imageNeedsFlipping = true;
+        }
+        if (listeners.size() > 0)
+            callListeners (*loadingImage);
         sendChangeMessage (this);
     }
@@ -227610,10 +227615,9 @@ public:
     {
         if (imageNeedsFlipping)
         {
-            imageSwapLock.enter();
+            const ScopedLock sl (imageSwapLock);
             swapVariables (loadingImage, activeImage);
             imageNeedsFlipping = false;
-            imageSwapLock.exit();
         }
         RectanglePlacement rp (RectanglePlacement::centred);
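
The two hunks above replace the manual imageSwapLock.enter()/exit() pairs with a block-scoped ScopedLock and move the listener callbacks outside the locked region, so the lock is released on every exit path and is held only while the frame buffer is copied or swapped. A minimal standalone sketch of the same pattern, using std::mutex and std::lock_guard instead of JUCE's CriticalSection/ScopedLock (the FrameSwapper name and the byte-vector frame representation are illustrative, not from the patch):

#include <cstddef>
#include <mutex>
#include <vector>

struct FrameSwapper
{
    std::vector<unsigned char> loading, active;  // capture thread fills 'loading', paint thread reads 'active'
    std::mutex swapLock;
    bool needsFlipping = false;

    // Capture thread: copy the new frame while holding the lock, but notify
    // listeners only after the guard has gone out of scope.
    void handleFrame (const unsigned char* buffer, std::size_t numBytes)
    {
        {
            std::lock_guard<std::mutex> sl (swapLock);   // released automatically, even on exceptions
            loading.assign (buffer, buffer + numBytes);
            needsFlipping = true;
        }
        // callListeners (loading);   // deliberately outside the critical section
    }

    // Paint thread: pick up the freshest frame using the same short critical section.
    void updateActiveImage()
    {
        std::lock_guard<std::mutex> sl (swapLock);
        if (needsFlipping)
        {
            loading.swap (active);
            needsFlipping = false;
        }
    }
};

The point of the RAII guard is that the unlock cannot be skipped by an early return or an exception thrown between enter() and exit(), which is exactly the hazard the explicit calls left open.
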
@@ -227646,7 +227650,7 @@ public:
         if (SUCCEEDED (hr))
         {
-            fileWriter.CoCreateInstance (CLSID_FileWriter, CLSCTX_INPROC_SERVER);
+            hr = fileWriter.CoCreateInstance (CLSID_FileWriter, CLSCTX_INPROC_SERVER);
             if (SUCCEEDED (hr))
             {
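
The fix above simply stores the HRESULT returned by the second CoCreateInstance call, so a failure to create the file-writer filter is detected instead of being silently discarded. A hedged sketch of the pattern using plain COM calls (JUCE's ComSmartPtr wrapper, which the diff actually uses, has its own two-argument CoCreateInstance helper):

#include <windows.h>

// Create two COM objects in sequence, continuing only while SUCCEEDED() holds.
HRESULT createTwoObjects (REFCLSID firstClsid, REFCLSID secondClsid,
                          REFIID iid, void** first, void** second)
{
    HRESULT hr = CoCreateInstance (firstClsid, nullptr, CLSCTX_INPROC_SERVER, iid, first);

    if (SUCCEEDED (hr))
        hr = CoCreateInstance (secondClsid, nullptr, CLSCTX_INPROC_SERVER, iid, second);  // dropping this assignment was the bug

    return hr;  // the caller sees the first failure instead of carrying on with a null pointer
}
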
@@ -245143,6 +245147,7 @@ END_JUCE_NAMESPACE
     NSViewComponentPeer* owner;
     NSNotificationCenter* notificationCenter;
     String* stringBeingComposed;
+    bool textWasInserted;
 }
 - (JuceNSView*) initWithOwner: (NSViewComponentPeer*) owner withFrame: (NSRect) frame;
@@ -245359,6 +245364,7 @@ END_JUCE_NAMESPACE
     [super initWithFrame: frame];
     owner = owner_;
     stringBeingComposed = 0;
+    textWasInserted = false;
     notificationCenter = [NSNotificationCenter defaultCenter];
@@ -245522,13 +245528,14 @@ END_JUCE_NAMESPACE
 - (void) keyDown: (NSEvent*) ev
 {
     TextInputTarget* const target = owner->findCurrentTextInputTarget();
+    textWasInserted = false;
     if (target != 0)
         [self interpretKeyEvents: [NSArray arrayWithObject: ev]];
     else
         deleteAndZero (stringBeingComposed);
-    if (stringBeingComposed == 0 && (owner == 0 || ! owner->redirectKeyDown (ev)))
+    if ((! textWasInserted) && (owner == 0 || ! owner->redirectKeyDown (ev)))
         [super keyDown: ev];
 }
@@ -245546,7 +245553,10 @@ END_JUCE_NAMESPACE
         TextInputTarget* const target = owner->findCurrentTextInputTarget();
         if (target != 0)
+        {
             target->insertTextAtCaret (nsStringToJuce ([aString isKindOfClass: [NSAttributedString class]] ? [aString string] : aString));
+            textWasInserted = true;
+        }
     }
     deleteAndZero (stringBeingComposed);
@@ -245570,6 +245580,7 @@ END_JUCE_NAMESPACE
             const Range<int> currentHighlight (target->getHighlightedRegion());
             target->insertTextAtCaret (*stringBeingComposed);
             target->setHighlightedRegion (currentHighlight.withLength (stringBeingComposed->length()));
+            textWasInserted = true;
         }
     }
@@ -245580,7 +245591,10 @@ END_JUCE_NAMESPACE
         TextInputTarget* const target = owner->findCurrentTextInputTarget();
         if (target != 0)
+        {
             target->insertTextAtCaret (*stringBeingComposed);
+            textWasInserted = true;
+        }
     }
     deleteAndZero (stringBeingComposed);
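
The keyDown/insertText changes above swap the old stringBeingComposed == 0 test for an explicit textWasInserted flag: keyDown clears the flag, hands the event to the system text-input machinery, and only falls back to the raw key handler when nothing was actually inserted. A rough C++ sketch of that routing idea, with Cocoa's interpretKeyEvents: step reduced to a plain callback (all names here are illustrative, not part of JUCE or Cocoa):

#include <functional>
#include <string>

struct KeyRouter
{
    bool textWasInserted = false;
    std::function<void (const std::string&)> insertIntoTarget;  // e.g. appends to a text editor
    std::function<bool (int)> rawKeyHandler;                    // handler for non-text keypresses

    // Called by the input method when a keystroke produced actual text.
    void insertText (const std::string& s)
    {
        insertIntoTarget (s);
        textWasInserted = true;   // remember that this event turned into text
    }

    // Top-level key handler: let the input method try first, then fall back.
    bool keyDown (int keyCode, const std::string& composedText)
    {
        textWasInserted = false;

        if (! composedText.empty())
            insertText (composedText);       // stand-in for [self interpretKeyEvents: ...]

        if (! textWasInserted)
            return rawKeyHandler (keyCode);  // nothing was inserted, so treat it as a raw keypress

        return true;
    }
};
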
@@ -247726,6 +247740,9 @@ public:
     static void flashMenuBar (NSMenu* menu)
     {
+        if ([[menu title] isEqualToString: @"Apple"])
+            return;
+
         [menu retain];
         const unichar f35Key = NSF35FunctionKey;
@@ -247738,21 +247755,22 @@ public:
         [menu insertItem: item atIndex: [menu numberOfItems]];
         [item release];
-        NSEvent* f35Event = [NSEvent keyEventWithType: NSKeyDown
-                                             location: NSZeroPoint
-                                        modifierFlags: NSCommandKeyMask
-                                            timestamp: 0
-                                         windowNumber: 0
-                                              context: [NSGraphicsContext currentContext]
-                                           characters: f35String
-                          charactersIgnoringModifiers: f35String
-                                            isARepeat: NO
-                                              keyCode: 0];
-        [menu performKeyEquivalent: f35Event];
+        if ([menu indexOfItem: item] >= 0)
+        {
+            NSEvent* f35Event = [NSEvent keyEventWithType: NSKeyDown
+                                                 location: NSZeroPoint
+                                            modifierFlags: NSCommandKeyMask
+                                                timestamp: 0
+                                             windowNumber: 0
+                                                  context: [NSGraphicsContext currentContext]
+                                               characters: f35String
+                              charactersIgnoringModifiers: f35String
+                                                isARepeat: NO
+                                                  keyCode: 0];
+            [menu performKeyEquivalent: f35Event];
+            [menu removeItem: item]; // (this throws if the item isn't actually in the menu)
+        }
         [menu release];
     }
@@ -251841,7 +251859,8 @@ END_JUCE_NAMESPACE
 @public
     CameraDevice* owner;
     QTCameraDeviceInteral* internal;
-    Time* firstRecordedTime;
+    int64 firstPresentationTime;
+    int64 averageTimeOffset;
 }
 - (QTCaptureCallbackDelegate*) initWithOwner: (CameraDevice*) owner internalDev: (QTCameraDeviceInteral*) d;
@@ -252016,13 +252035,13 @@ END_JUCE_NAMESPACE
     [super init];
     owner = owner_;
     internal = d;
-    firstRecordedTime = 0;
+    firstPresentationTime = 0;
+    averageTimeOffset = 0;
     return self;
 }
 - (void) dealloc
 {
-    delete firstRecordedTime;
     [super dealloc];
 }
@@ -252045,10 +252064,28 @@ END_JUCE_NAMESPACE
   didOutputSampleBuffer: (QTSampleBuffer*) sampleBuffer
          fromConnection: (QTCaptureConnection*) connection
 {
-    if (firstRecordedTime == 0)
-    {
-        const Time now (Time::getCurrentTime());
-        firstRecordedTime = new Time (now - RelativeTime (0.1));
-    }
+    const Time now (Time::getCurrentTime());
+    int64 presentationTime = ([sampleBuffer presentationTime].timeValue * 1000) / [sampleBuffer presentationTime].timeScale;
+
+  #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_5
+    NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: QTSampleBufferHostTimeAttribute];
+  #else
+    NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: @"hostTime"];
+  #endif
+
+    if (hosttime != nil)
+        presentationTime = (int64) AudioConvertHostTimeToNanos ([hosttime unsignedLongLongValue]) / 1000000;
+
+    const int64 timeDiff = now.toMilliseconds() - presentationTime - 50;
+
+    if (firstPresentationTime == 0)
+    {
+        firstPresentationTime = presentationTime;
+        averageTimeOffset = timeDiff;
+    }
+    else
+    {
+        averageTimeOffset = (averageTimeOffset * 120 + timeDiff * 8) / 128;
+    }
 }
@@ -252108,7 +252145,7 @@ void CameraDevice::startRecordingToFile (const File& file, int quality)
     stopRecording();
     QTCameraDeviceInteral* const d = (QTCameraDeviceInteral*) internal;
-    deleteAndZero (d->callbackDelegate->firstRecordedTime);
+    d->callbackDelegate->firstPresentationTime = 0;
     file.deleteFile();
     // In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
@@ -252145,8 +252182,8 @@ void CameraDevice::startRecordingToFile (const File& file, int quality)
 const Time CameraDevice::getTimeOfFirstRecordedFrame() const
 {
     QTCameraDeviceInteral* const d = (QTCameraDeviceInteral*) internal;
-    if (d->callbackDelegate->firstRecordedTime != 0)
-        return *d->callbackDelegate->firstRecordedTime;
+    if (d->callbackDelegate->firstPresentationTime != 0)
+        return Time (d->callbackDelegate->firstPresentationTime + d->callbackDelegate->averageTimeOffset);
     return Time();
 }
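
Rather than stamping the first frame with "now minus 0.1 seconds", the delegate now remembers the capture clock's first presentation time and maintains a smoothed offset between wall-clock time and that clock; getTimeOfFirstRecordedFrame() then adds the offset back on. With weights 120 and 8 out of 128, each new frame contributes 1/16 of the running estimate, so per-frame timestamp jitter is averaged away. A small self-contained sketch of the idea (the 50 ms term mirrors the patch's assumed capture latency; fetching timestamps from QTSampleBuffer and AudioConvertHostTimeToNanos is left out):

#include <cstdint>

class CaptureClockAligner
{
public:
    // Call once per frame with the capture clock's timestamp and the wall-clock time, both in ms.
    void addFrame (std::int64_t presentationTimeMs, std::int64_t nowMs)
    {
        const std::int64_t timeDiff = nowMs - presentationTimeMs - 50;  // ~50 ms assumed pipeline delay

        if (firstPresentationTime == 0)
        {
            firstPresentationTime = presentationTimeMs;
            averageTimeOffset = timeDiff;                 // seed the running estimate
        }
        else
        {
            // Exponential smoothing: the old estimate keeps 120/128, the new sample gets 8/128.
            averageTimeOffset = (averageTimeOffset * 120 + timeDiff * 8) / 128;
        }
    }

    // Wall-clock moment of the first recorded frame, in ms.
    std::int64_t timeOfFirstFrameMs() const noexcept
    {
        return firstPresentationTime + averageTimeOffset;
    }

private:
    std::int64_t firstPresentationTime = 0;
    std::int64_t averageTimeOffset = 0;
};
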
@@ -39,7 +39,8 @@ END_JUCE_NAMESPACE
 @public
     CameraDevice* owner;
     QTCameraDeviceInteral* internal;
-    Time* firstRecordedTime;
+    int64 firstPresentationTime;
+    int64 averageTimeOffset;
 }
 - (QTCaptureCallbackDelegate*) initWithOwner: (CameraDevice*) owner internalDev: (QTCameraDeviceInteral*) d;
@@ -215,13 +216,13 @@ END_JUCE_NAMESPACE
     [super init];
     owner = owner_;
     internal = d;
-    firstRecordedTime = 0;
+    firstPresentationTime = 0;
+    averageTimeOffset = 0;
     return self;
 }
 - (void) dealloc
 {
-    delete firstRecordedTime;
     [super dealloc];
 }
@@ -244,10 +245,28 @@ END_JUCE_NAMESPACE
   didOutputSampleBuffer: (QTSampleBuffer*) sampleBuffer
          fromConnection: (QTCaptureConnection*) connection
 {
-    if (firstRecordedTime == 0)
-    {
-        const Time now (Time::getCurrentTime());
-        firstRecordedTime = new Time (now - RelativeTime (0.1));
-    }
+    const Time now (Time::getCurrentTime());
+    int64 presentationTime = ([sampleBuffer presentationTime].timeValue * 1000) / [sampleBuffer presentationTime].timeScale;
+
+  #if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_5
+    NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: QTSampleBufferHostTimeAttribute];
+  #else
+    NSNumber* hosttime = (NSNumber*) [sampleBuffer attributeForKey: @"hostTime"];
+  #endif
+
+    if (hosttime != nil)
+        presentationTime = (int64) AudioConvertHostTimeToNanos ([hosttime unsignedLongLongValue]) / 1000000;
+
+    const int64 timeDiff = now.toMilliseconds() - presentationTime - 50;
+
+    if (firstPresentationTime == 0)
+    {
+        firstPresentationTime = presentationTime;
+        averageTimeOffset = timeDiff;
+    }
+    else
+    {
+        averageTimeOffset = (averageTimeOffset * 120 + timeDiff * 8) / 128;
+    }
 }
@@ -309,7 +328,7 @@ void CameraDevice::startRecordingToFile (const File& file, int quality)
     stopRecording();
     QTCameraDeviceInteral* const d = (QTCameraDeviceInteral*) internal;
-    deleteAndZero (d->callbackDelegate->firstRecordedTime);
+    d->callbackDelegate->firstPresentationTime = 0;
     file.deleteFile();
     // In some versions of QT (e.g. on 10.5), if you record video without audio, the speed comes
@@ -346,8 +365,8 @@ void CameraDevice::startRecordingToFile (const File& file, int quality)
 const Time CameraDevice::getTimeOfFirstRecordedFrame() const
 {
     QTCameraDeviceInteral* const d = (QTCameraDeviceInteral*) internal;
-    if (d->callbackDelegate->firstRecordedTime != 0)
-        return *d->callbackDelegate->firstRecordedTime;
+    if (d->callbackDelegate->firstPresentationTime != 0)
+        return Time (d->callbackDelegate->firstPresentationTime + d->callbackDelegate->averageTimeOffset);
     return Time();
 }
@@ -155,6 +155,9 @@ public:
     static void flashMenuBar (NSMenu* menu)
     {
+        if ([[menu title] isEqualToString: @"Apple"])
+            return;
+
         [menu retain];
         const unichar f35Key = NSF35FunctionKey;
@@ -167,21 +170,22 @@ public:
         [menu insertItem: item atIndex: [menu numberOfItems]];
         [item release];
-        NSEvent* f35Event = [NSEvent keyEventWithType: NSKeyDown
-                                             location: NSZeroPoint
-                                        modifierFlags: NSCommandKeyMask
-                                            timestamp: 0
-                                         windowNumber: 0
-                                              context: [NSGraphicsContext currentContext]
-                                           characters: f35String
-                          charactersIgnoringModifiers: f35String
-                                            isARepeat: NO
-                                              keyCode: 0];
-        [menu performKeyEquivalent: f35Event];
+        if ([menu indexOfItem: item] >= 0)
+        {
+            NSEvent* f35Event = [NSEvent keyEventWithType: NSKeyDown
+                                                 location: NSZeroPoint
+                                            modifierFlags: NSCommandKeyMask
+                                                timestamp: 0
+                                             windowNumber: 0
+                                                  context: [NSGraphicsContext currentContext]
+                                               characters: f35String
+                              charactersIgnoringModifiers: f35String
+                                                isARepeat: NO
+                                                  keyCode: 0];
+            [menu performKeyEquivalent: f35Event];
+            [menu removeItem: item]; // (this throws if the item isn't actually in the menu)
+        }
         [menu release];
     }
@@ -40,6 +40,7 @@ END_JUCE_NAMESPACE
     NSViewComponentPeer* owner;
     NSNotificationCenter* notificationCenter;
    String* stringBeingComposed;
+    bool textWasInserted;
 }
 - (JuceNSView*) initWithOwner: (NSViewComponentPeer*) owner withFrame: (NSRect) frame;
@@ -273,6 +274,7 @@ END_JUCE_NAMESPACE
     [super initWithFrame: frame];
     owner = owner_;
     stringBeingComposed = 0;
+    textWasInserted = false;
     notificationCenter = [NSNotificationCenter defaultCenter];
@@ -439,13 +441,14 @@ END_JUCE_NAMESPACE
 - (void) keyDown: (NSEvent*) ev
 {
     TextInputTarget* const target = owner->findCurrentTextInputTarget();
+    textWasInserted = false;
     if (target != 0)
         [self interpretKeyEvents: [NSArray arrayWithObject: ev]];
     else
         deleteAndZero (stringBeingComposed);
-    if (stringBeingComposed == 0 && (owner == 0 || ! owner->redirectKeyDown (ev)))
+    if ((! textWasInserted) && (owner == 0 || ! owner->redirectKeyDown (ev)))
         [super keyDown: ev];
 }
@@ -464,7 +467,10 @@ END_JUCE_NAMESPACE
         TextInputTarget* const target = owner->findCurrentTextInputTarget();
         if (target != 0)
+        {
             target->insertTextAtCaret (nsStringToJuce ([aString isKindOfClass: [NSAttributedString class]] ? [aString string] : aString));
+            textWasInserted = true;
+        }
     }
     deleteAndZero (stringBeingComposed);
@@ -488,6 +494,7 @@ END_JUCE_NAMESPACE
             const Range<int> currentHighlight (target->getHighlightedRegion());
             target->insertTextAtCaret (*stringBeingComposed);
             target->setHighlightedRegion (currentHighlight.withLength (stringBeingComposed->length()));
+            textWasInserted = true;
         }
     }
@@ -498,7 +505,10 @@ END_JUCE_NAMESPACE
         TextInputTarget* const target = owner->findCurrentTextInputTarget();
         if (target != 0)
+        {
             target->insertTextAtCaret (*stringBeingComposed);
+            textWasInserted = true;
+        }
     }
     deleteAndZero (stringBeingComposed);
@@ -210,19 +210,24 @@ public:
             }
         }
-        imageSwapLock.enter();
-        const int lineStride = width * 3;
-        const Image::BitmapData destData (*loadingImage, 0, 0, width, height, true);
-        for (int i = 0; i < height; ++i)
-            memcpy (destData.getLinePointer ((height - 1) - i),
-                    buffer + lineStride * i,
-                    lineStride);
-        imageNeedsFlipping = true;
-        imageSwapLock.exit();
-        callListeners (*loadingImage);
+        {
+            const int lineStride = width * 3;
+            const ScopedLock sl (imageSwapLock);
+            {
+                const Image::BitmapData destData (*loadingImage, 0, 0, width, height, true);
+                for (int i = 0; i < height; ++i)
+                    memcpy (destData.getLinePointer ((height - 1) - i),
+                            buffer + lineStride * i,
+                            lineStride);
+            }
+            imageNeedsFlipping = true;
+        }
+        if (listeners.size() > 0)
+            callListeners (*loadingImage);
         sendChangeMessage (this);
     }
@@ -231,10 +236,9 @@ public:
     {
         if (imageNeedsFlipping)
         {
-            imageSwapLock.enter();
+            const ScopedLock sl (imageSwapLock);
             swapVariables (loadingImage, activeImage);
             imageNeedsFlipping = false;
-            imageSwapLock.exit();
         }
         RectanglePlacement rp (RectanglePlacement::centred);
@@ -267,7 +271,7 @@ public:
         if (SUCCEEDED (hr))
         {
-            fileWriter.CoCreateInstance (CLSID_FileWriter, CLSCTX_INPROC_SERVER);
+            hr = fileWriter.CoCreateInstance (CLSID_FileWriter, CLSCTX_INPROC_SERVER);
             if (SUCCEEDED (hr))
             {
@@ -378,6 +382,7 @@ public:
     }
 }
+//==============================================================================
 class DShowCaptureViewerComp : public Component,
                                public ChangeListener