Merge branch 'charlesmchen/voiceMemos'

pull/1/head
Matthew Chen 8 years ago
commit 6a4a08d3ee

@ -22,6 +22,7 @@
344F2F671E57A932000D9322 /* UIViewController+OWS.m in Sources */ = {isa = PBXBuildFile; fileRef = 344F2F661E57A932000D9322 /* UIViewController+OWS.m */; };
34533F181EA8D2070006114F /* OWSAudioAttachmentPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 34533F171EA8D2070006114F /* OWSAudioAttachmentPlayer.m */; };
34535D821E256BE9008A4747 /* UIView+OWS.m in Sources */ = {isa = PBXBuildFile; fileRef = 34535D811E256BE9008A4747 /* UIView+OWS.m */; };
3453D8EA1EC0D4ED003F9E6F /* OWSAlerts.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3453D8E91EC0D4ED003F9E6F /* OWSAlerts.swift */; };
345671011E89A5F1006EE662 /* ThreadUtil.m in Sources */ = {isa = PBXBuildFile; fileRef = 345671001E89A5F1006EE662 /* ThreadUtil.m */; };
3456710A1E8A9F5D006EE662 /* TSGenericAttachmentAdapter.m in Sources */ = {isa = PBXBuildFile; fileRef = 345671091E8A9F5D006EE662 /* TSGenericAttachmentAdapter.m */; };
3471B1DA1EB7C63600F6AEC8 /* NewNonContactConversationViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 3471B1D91EB7C63600F6AEC8 /* NewNonContactConversationViewController.m */; };
@ -379,6 +380,7 @@
34533F171EA8D2070006114F /* OWSAudioAttachmentPlayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSAudioAttachmentPlayer.m; sourceTree = "<group>"; };
34535D801E256BE9008A4747 /* UIView+OWS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIView+OWS.h"; sourceTree = "<group>"; };
34535D811E256BE9008A4747 /* UIView+OWS.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIView+OWS.m"; sourceTree = "<group>"; };
3453D8E91EC0D4ED003F9E6F /* OWSAlerts.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OWSAlerts.swift; sourceTree = "<group>"; };
345670FF1E89A5F1006EE662 /* ThreadUtil.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ThreadUtil.h; sourceTree = "<group>"; };
345671001E89A5F1006EE662 /* ThreadUtil.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadUtil.m; sourceTree = "<group>"; };
345671081E8A9F5D006EE662 /* TSGenericAttachmentAdapter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TSGenericAttachmentAdapter.h; sourceTree = "<group>"; };
@ -1289,6 +1291,7 @@
76EB052B18170B33006006FC /* Views */ = {
isa = PBXGroup;
children = (
452EA09D1EA7ABE00078744B /* AttachmentPointerView.swift */,
45F3AEB51DFDE7900080CE33 /* AvatarImageView.swift */,
451764291DE939FD00EDB8B9 /* ContactCell.swift */,
451764281DE939FD00EDB8B9 /* ContactCell.xib */,
@ -1299,6 +1302,7 @@
45B201751DAECBFE00C461E0 /* HighlightableLabel.swift */,
4531C9C21DD8E6D800F08304 /* JSQMessagesCollectionViewCell+OWS.h */,
4531C9C31DD8E6D800F08304 /* JSQMessagesCollectionViewCell+OWS.m */,
3453D8E91EC0D4ED003F9E6F /* OWSAlerts.swift */,
45C681B91D305C080050903A /* OWSCallCollectionViewCell.h */,
45C681BA1D305C080050903A /* OWSCallCollectionViewCell.m */,
45C681C01D305C9E0050903A /* OWSCallCollectionViewCell.xib */,
@ -1319,7 +1323,6 @@
45F2B1961D9CA207000D2C69 /* OWSOutgoingMessageCollectionViewCell.xib */,
34330AA11E79686200DF2FB9 /* OWSProgressView.h */,
34330AA21E79686200DF2FB9 /* OWSProgressView.m */,
452EA09D1EA7ABE00078744B /* AttachmentPointerView.swift */,
45A6DAD51EBBF85500893231 /* ReminderView.swift */,
);
name = Views;
@ -2094,6 +2097,7 @@
34B3F8851E8DF1700035BE1A /* NewGroupViewController.m in Sources */,
B6C93C4E199567AD00EDF894 /* DebugLogger.m in Sources */,
34B3F8821E8DF1700035BE1A /* MessageComposeTableViewController.m in Sources */,
3453D8EA1EC0D4ED003F9E6F /* OWSAlerts.swift in Sources */,
45F659821E1BE77000444429 /* NonCallKitCallUIAdaptee.swift in Sources */,
45AE48511E0732D6004D96C2 /* TurnServerInfo.swift in Sources */,
34B3F8771E8DF1700035BE1A /* ContactsPicker.swift in Sources */,

@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "voice-memo-button-32.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "voice-memo-button-64.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "voice-memo-button-96.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 KiB

@ -270,25 +270,23 @@ static NSString *const kURLHostVerifyPrefix = @"verify";
NSString *filename = url.lastPathComponent;
if ([filename stringByDeletingPathExtension].length < 1) {
DDLogError(@"Application opened with URL invalid filename: %@", url);
[ViewControllerUtils
showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_MESSAGE_INVALID_FILENAME",
@"Message for the alert indicating the 'export with signal' file had an "
@"invalid filename.")];
[OWSAlerts showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_MESSAGE_INVALID_FILENAME",
@"Message for the alert indicating the 'export with signal' file had an "
@"invalid filename.")];
return NO;
}
NSString *fileExtension = [filename pathExtension];
if (fileExtension.length < 1) {
DDLogError(@"Application opened with URL missing file extension: %@", url);
[ViewControllerUtils
showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_MESSAGE_UNKNOWN_TYPE",
@"Message for the alert indicating the 'export with signal' file had "
@"unknown type.")];
[OWSAlerts showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_MESSAGE_UNKNOWN_TYPE",
@"Message for the alert indicating the 'export with signal' file had "
@"unknown type.")];
return NO;
}
@ -320,7 +318,7 @@ static NSString *const kURLHostVerifyPrefix = @"verify";
} else if ([isDirectory boolValue]) {
DDLogInfo(@"%@ User picked directory at url: %@", self.tag, url);
DDLogError(@"Application opened with URL of unknown UTI type: %@", url);
[ViewControllerUtils
[OWSAlerts
showAlertWithTitle:
NSLocalizedString(@"ATTACHMENT_PICKER_DOCUMENTS_PICKED_DIRECTORY_FAILED_ALERT_TITLE",
@"Alert title when picking a document fails because user picked a directory/bundle")
@ -333,34 +331,31 @@ static NSString *const kURLHostVerifyPrefix = @"verify";
NSData *data = [NSData dataWithContentsOfURL:url];
if (!data) {
DDLogError(@"Application opened with URL with unloadable content: %@", url);
[ViewControllerUtils
showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_MESSAGE_MISSING_DATA",
@"Message for the alert indicating the 'export with signal' data "
@"couldn't be loaded.")];
[OWSAlerts showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_MESSAGE_MISSING_DATA",
@"Message for the alert indicating the 'export with signal' data "
@"couldn't be loaded.")];
return NO;
}
SignalAttachment *attachment = [SignalAttachment attachmentWithData:data dataUTI:utiType filename:filename];
if (!attachment) {
DDLogError(@"Application opened with URL with invalid content: %@", url);
[ViewControllerUtils
showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_MESSAGE_MISSING_ATTACHMENT",
@"Message for the alert indicating the 'export with signal' attachment "
@"couldn't be loaded.")];
[OWSAlerts showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_MESSAGE_MISSING_ATTACHMENT",
@"Message for the alert indicating the 'export with signal' attachment "
@"couldn't be loaded.")];
return NO;
}
if ([attachment hasError]) {
DDLogError(@"Application opened with URL with content error: %@ %@", url, [attachment errorName]);
[ViewControllerUtils
showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:[attachment errorName]];
[OWSAlerts showAlertWithTitle:
NSLocalizedString(@"EXPORT_WITH_SIGNAL_ERROR_TITLE",
@"Title for the alert indicating the 'export with signal' attachment had an error.")
message:[attachment errorName]];
return NO;
}
DDLogInfo(@"Application opened with URL: %@", url);

@ -10,6 +10,7 @@
#import <AssetsLibrary/AssetsLibrary.h>
#import <JSQMessagesViewController/JSQMessagesMediaViewBubbleImageMasker.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import <SignalServiceKit/MIMETypeUtil.h>
NS_ASSUME_NONNULL_BEGIN
@ -131,8 +132,14 @@ NS_ASSUME_NONNULL_BEGIN
- (void)performEditingAction:(SEL)action
{
if (action == @selector(copy:)) {
NSString *utiType = [MIMETypeUtil utiTypeForMIMEType:self.attachment.contentType];
if (!utiType) {
OWSAssert(0);
utiType = (NSString *)kUTTypeGIF;
}
UIPasteboard *pasteboard = UIPasteboard.generalPasteboard;
[pasteboard setData:self.fileData forPasteboardType:(__bridge NSString *)kUTTypeGIF];
[pasteboard setData:self.fileData forPasteboardType:utiType];
} else if (action == NSSelectorFromString(@"save:")) {
NSData *photoData = self.fileData;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

@ -8,6 +8,7 @@
#import "TSAttachmentStream.h"
#import <JSQMessagesViewController/JSQMessagesMediaViewBubbleImageMasker.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import <SignalServiceKit/MimeTypeUtil.h>
NS_ASSUME_NONNULL_BEGIN
@ -116,18 +117,14 @@ NS_ASSUME_NONNULL_BEGIN
if (action == @selector(copy:)) {
// We should always copy to the pasteboard as data, not an UIImage.
// The pasteboard should has as specific as UTI type as possible and
// The pasteboard should have as specific as UTI type as possible and
// data support should be far more general than UIImage support.
OWSAssert(self.attachment.filePath.length > 0);
NSString *fileExtension = [self.attachment.filePath pathExtension];
NSArray *utiTypes = (__bridge_transfer NSArray *)UTTypeCreateAllIdentifiersForTag(
kUTTagClassFilenameExtension, (__bridge CFStringRef)fileExtension, (CFStringRef) @"public.image");
NSString *utiType = (NSString *)kUTTypeImage;
OWSAssert(utiTypes.count > 0);
if (utiTypes.count > 0) {
utiType = utiTypes[0];
}
NSString *utiType = [MIMETypeUtil utiTypeForMIMEType:self.attachment.contentType];
if (!utiType) {
OWSAssert(0);
utiType = (NSString *)kUTTypeImage;
}
NSData *data = [NSData dataWithContentsOfURL:self.attachment.mediaURL];
[UIPasteboard.generalPasteboard setData:data forPasteboardType:utiType];
return;

@ -297,9 +297,13 @@ NS_ASSUME_NONNULL_BEGIN
{
if ([self isVideo]) {
if (action == @selector(copy:)) {
NSString *utiType = [MIMETypeUtil utiTypeForMIMEType:_contentType];
if (!utiType) {
OWSAssert(0);
utiType = (NSString *)kUTTypeVideo;
}
NSData *data = [NSData dataWithContentsOfURL:self.fileURL];
// TODO: This assumes all videos are mp4.
[UIPasteboard.generalPasteboard setData:data forPasteboardType:(NSString *)kUTTypeMPEG4];
[UIPasteboard.generalPasteboard setData:data forPasteboardType:utiType];
return;
} else if (action == NSSelectorFromString(@"save:")) {
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(self.fileURL.path)) {
@ -310,22 +314,30 @@ NS_ASSUME_NONNULL_BEGIN
}
} else if ([self isAudio]) {
if (action == @selector(copy:)) {
NSData *data = [NSData dataWithContentsOfURL:self.fileURL];
NSString *pasteboardType = [MIMETypeUtil getSupportedExtensionFromAudioMIMEType:self.contentType];
if ([pasteboardType isEqualToString:@"mp3"]) {
[UIPasteboard.generalPasteboard setData:data forPasteboardType:(NSString *)kUTTypeMP3];
} else if ([pasteboardType isEqualToString:@"aiff"]) {
[UIPasteboard.generalPasteboard setData:data
forPasteboardType:(NSString *)kUTTypeAudioInterchangeFileFormat];
} else if ([pasteboardType isEqualToString:@"m4a"]) {
[UIPasteboard.generalPasteboard setData:data forPasteboardType:(NSString *)kUTTypeMPEG4Audio];
} else if ([pasteboardType isEqualToString:@"amr"]) {
[UIPasteboard.generalPasteboard setData:data forPasteboardType:@"org.3gpp.adaptive-multi-rate-audio"];
} else {
[UIPasteboard.generalPasteboard setData:data forPasteboardType:(NSString *)kUTTypeAudio];
NSString *utiType = [MIMETypeUtil utiTypeForMIMEType:_contentType];
if (!utiType) {
if ([_contentType isEqualToString:@"audio/amr"]) {
utiType = @"org.3gpp.adaptive-multi-rate-audio";
} else if ([_contentType isEqualToString:@"audio/mp3"] ||
[_contentType isEqualToString:@"audio/x-mpeg"] || [_contentType isEqualToString:@"audio/mpeg"] ||
[_contentType isEqualToString:@"audio/mpeg3"] || [_contentType isEqualToString:@"audio/x-mp3"] ||
[_contentType isEqualToString:@"audio/x-mpeg3"]) {
utiType = (NSString *)kUTTypeMP3;
} else if ([_contentType isEqualToString:@"audio/aac"] ||
[_contentType isEqualToString:@"audio/x-m4a"]) {
utiType = (NSString *)kUTTypeMPEG4Audio;
} else if ([_contentType isEqualToString:@"audio/aiff"] ||
[_contentType isEqualToString:@"audio/x-aiff"]) {
utiType = (NSString *)kUTTypeAudioInterchangeFileFormat;
} else {
OWSAssert(0);
utiType = (NSString *)kUTTypeAudio;
}
}
NSData *data = [NSData dataWithContentsOfURL:self.fileURL];
OWSAssert(data);
[UIPasteboard.generalPasteboard setData:data forPasteboardType:utiType];
}
} else {
// Shouldn't get here, as only supported actions should be exposed via canPerformEditingAction

@ -31,7 +31,6 @@ extern NSString *const OWSMessagesViewControllerDidAppearNotification;
@interface MessagesViewController : JSQMessagesViewController <UIImagePickerControllerDelegate,
UINavigationControllerDelegate,
UITextViewDelegate,
AVAudioRecorderDelegate,
AVAudioPlayerDelegate,
UIGestureRecognizerDelegate>

@ -12,6 +12,7 @@
#import "FingerprintViewController.h"
#import "FullImageViewController.h"
#import "NSDate+millisecondTimeStamp.h"
#import "NSTimer+OWS.h"
#import "NewGroupViewController.h"
#import "OWSAudioAttachmentPlayer.h"
#import "OWSCall.h"
@ -110,7 +111,8 @@ typedef enum : NSUInteger {
@implementation OWSMessagesComposerTextView
- (BOOL)canBecomeFirstResponder {
- (BOOL)canBecomeFirstResponder
{
return YES;
}
@ -121,7 +123,8 @@ typedef enum : NSUInteger {
return ([SignalAttachment pasteboardHasPossibleAttachment] && ![SignalAttachment pasteboardHasText]);
}
- (BOOL)canPerformAction:(SEL)action withSender:(id)sender {
- (BOOL)canPerformAction:(SEL)action withSender:(id)sender
{
if (action == @selector(paste:)) {
if ([self pasteboardHasPossibleAttachment]) {
return YES;
@ -130,14 +133,15 @@ typedef enum : NSUInteger {
return [super canPerformAction:action withSender:sender];
}
- (void)paste:(id)sender {
- (void)paste:(id)sender
{
if ([self pasteboardHasPossibleAttachment]) {
SignalAttachment *attachment = [SignalAttachment attachmentFromPasteboard];
// Note: attachment might be nil or have an error at this point; that's fine.
[self.textViewPasteDelegate didPasteAttachment:attachment];
return;
}
[super paste:sender];
}
@ -145,6 +149,38 @@ typedef enum : NSUInteger {
#pragma mark -
@protocol OWSMessagesToolbarContentDelegate <NSObject>
- (void)voiceMemoGestureDidStart;
- (void)voiceMemoGestureDidEnd;
- (void)voiceMemoGestureDidCancel;
- (void)voiceMemoGestureDidChange:(CGFloat)cancelAlpha;
@end
#pragma mark -
@interface OWSMessagesToolbarContentView () <UIGestureRecognizerDelegate>
@property (nonatomic, nullable, weak) id<OWSMessagesToolbarContentDelegate> delegate;
@property (nonatomic) BOOL shouldShowVoiceMemoButton;
@property (nonatomic, nullable) UIButton *voiceMemoButton;
@property (nonatomic, nullable) UIButton *sendButton;
@property (nonatomic) BOOL isRecordingVoiceMemo;
@property (nonatomic) CGPoint voiceMemoGestureStartLocation;
@end
#pragma mark -
@implementation OWSMessagesToolbarContentView
#pragma mark - Class methods
@ -155,12 +191,175 @@ typedef enum : NSUInteger {
bundle:[NSBundle bundleForClass:[OWSMessagesToolbarContentView class]]];
}
- (void)ensureSubviews
{
if (!self.sendButton) {
OWSAssert(self.rightBarButtonItem);
self.sendButton = self.rightBarButtonItem;
}
if (!self.voiceMemoButton) {
UIImage *icon = [UIImage imageNamed:@"voice-memo-button"];
OWSAssert(icon);
UIButton *button = [UIButton buttonWithType:UIButtonTypeCustom];
[button setImage:[icon imageWithRenderingMode:UIImageRenderingModeAlwaysTemplate]
forState:UIControlStateNormal];
button.imageView.tintColor = [UIColor ows_materialBlueColor];
// We want to be permissive about the voice message gesture, so we:
//
// * Add the gesture recognizer to the button's superview instead of the button.
// * Filter the touches that the gesture recognizer receives by serving as its
// delegate.
UILongPressGestureRecognizer *longPressGestureRecognizer =
[[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleLongPress:)];
longPressGestureRecognizer.minimumPressDuration = 0;
longPressGestureRecognizer.delegate = self;
[self addGestureRecognizer:longPressGestureRecognizer];
self.userInteractionEnabled = YES;
self.voiceMemoButton = button;
}
[self ensureShouldShowVoiceMemoButton];
[self ensureVoiceMemoButton];
}
- (void)ensureEnabling
{
[self ensureShouldShowVoiceMemoButton];
OWSAssert(self.voiceMemoButton.isEnabled == YES);
OWSAssert(self.sendButton.isEnabled == YES);
}
- (void)ensureShouldShowVoiceMemoButton
{
self.shouldShowVoiceMemoButton = self.textView.text.length < 1;
}
- (void)setShouldShowVoiceMemoButton:(BOOL)shouldShowVoiceMemoButton
{
if (_shouldShowVoiceMemoButton == shouldShowVoiceMemoButton) {
return;
}
_shouldShowVoiceMemoButton = shouldShowVoiceMemoButton;
[self ensureVoiceMemoButton];
}
- (void)ensureVoiceMemoButton
{
if (self.shouldShowVoiceMemoButton) {
self.rightBarButtonItem = self.voiceMemoButton;
self.rightBarButtonItemWidth = [self.voiceMemoButton sizeThatFits:CGSizeZero].width;
} else {
self.rightBarButtonItem = self.sendButton;
self.rightBarButtonItemWidth = [self.sendButton sizeThatFits:CGSizeZero].width;
}
}
- (void)handleLongPress:(UIGestureRecognizer *)sender
{
switch (sender.state) {
case UIGestureRecognizerStatePossible:
case UIGestureRecognizerStateCancelled:
case UIGestureRecognizerStateFailed:
if (self.isRecordingVoiceMemo) {
// Cancel voice message if necessary.
self.isRecordingVoiceMemo = NO;
[self.delegate voiceMemoGestureDidCancel];
}
break;
case UIGestureRecognizerStateBegan:
if (self.isRecordingVoiceMemo) {
// Cancel voice message if necessary.
self.isRecordingVoiceMemo = NO;
[self.delegate voiceMemoGestureDidCancel];
}
// Start voice message.
[self.textView resignFirstResponder];
self.isRecordingVoiceMemo = YES;
self.voiceMemoGestureStartLocation = [sender locationInView:self];
[self.delegate voiceMemoGestureDidStart];
break;
case UIGestureRecognizerStateChanged:
if (self.isRecordingVoiceMemo) {
// Check for "slide to cancel" gesture.
CGPoint location = [sender locationInView:self];
CGFloat offset = MAX(0, self.voiceMemoGestureStartLocation.x - location.x);
// The lower this value, the easier it is to cancel by accident.
// The higher this value, the harder it is to cancel.
const CGFloat kCancelOffsetPoints = 100.f;
CGFloat cancelAlpha = offset / kCancelOffsetPoints;
BOOL isCancelled = cancelAlpha >= 1.f;
if (isCancelled) {
self.isRecordingVoiceMemo = NO;
[self.delegate voiceMemoGestureDidCancel];
} else {
[self.delegate voiceMemoGestureDidChange:cancelAlpha];
}
}
break;
case UIGestureRecognizerStateEnded:
if (self.isRecordingVoiceMemo) {
// End voice message.
self.isRecordingVoiceMemo = NO;
[self.delegate voiceMemoGestureDidEnd];
}
break;
}
}
- (void)cancelVoiceMemoIfNecessary
{
if (self.isRecordingVoiceMemo) {
self.isRecordingVoiceMemo = NO;
}
}
#pragma mark - UIGestureRecognizerDelegate
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldReceiveTouch:(UITouch *)touch
{
// We want to be permissive about the voice message gesture, so we accept
// gesture that begin within N points of the
CGFloat kVoiceMemoGestureTolerancePoints = 10;
CGPoint location = [touch locationInView:self.voiceMemoButton];
CGRect hitTestRect = CGRectInset(
self.voiceMemoButton.bounds, -kVoiceMemoGestureTolerancePoints, -kVoiceMemoGestureTolerancePoints);
return CGRectContainsPoint(hitTestRect, location);
}
@end
#pragma mark -
@interface OWSMessagesInputToolbar ()
@property (nonatomic) UIView *voiceMemoUI;
@property (nonatomic) NSDate *voiceMemoStartTime;
@property (nonatomic) NSTimer *voiceMemoUpdateTimer;
@property (nonatomic) UILabel *recordingLabel;
@end
#pragma mark -
@implementation OWSMessagesInputToolbar
- (void)toggleSendButtonEnabled
{
// Do nothing; disables JSQ's control over send button enabling.
// Overrides a method in JSQMessagesInputToolbar.
}
- (JSQMessagesToolbarContentView *)loadToolbarContentView {
NSArray *views = [[OWSMessagesToolbarContentView nib] instantiateWithOwner:nil
options:nil];
@ -170,12 +369,145 @@ typedef enum : NSUInteger {
return view;
}
- (void)showVoiceMemoUI
{
OWSAssert([NSThread isMainThread]);
self.voiceMemoStartTime = [NSDate date];
[self.voiceMemoUI removeFromSuperview];
self.voiceMemoUI = [UIView new];
self.voiceMemoUI.userInteractionEnabled = NO;
self.voiceMemoUI.backgroundColor = [UIColor whiteColor];
[self addSubview:self.voiceMemoUI];
self.voiceMemoUI.frame = CGRectMake(0, 0, self.bounds.size.width, self.bounds.size.height);
self.recordingLabel = [UILabel new];
self.recordingLabel.textColor = [UIColor ows_materialBlueColor];
self.recordingLabel.font = [UIFont ows_mediumFontWithSize:14.f];
[self.voiceMemoUI addSubview:self.recordingLabel];
[self updateVoiceMemo];
UIImage *icon = [UIImage imageNamed:@"voice-memo-button"];
OWSAssert(icon);
UIImageView *imageView =
[[UIImageView alloc] initWithImage:[icon imageWithRenderingMode:UIImageRenderingModeAlwaysTemplate]];
imageView.tintColor = [UIColor ows_materialBlueColor];
[self.voiceMemoUI addSubview:imageView];
UILabel *cancelLabel = [UILabel new];
cancelLabel.textColor = [UIColor ows_destructiveRedColor];
cancelLabel.font = [UIFont ows_mediumFontWithSize:14.f];
cancelLabel.text
= NSLocalizedString(@"VOICE_MESSAGE_CANCEL_INSTRUCTIONS", @"Indicates how to cancel a voice message.");
[self.voiceMemoUI addSubview:cancelLabel];
[imageView autoVCenterInSuperview];
[imageView autoPinEdgeToSuperviewEdge:ALEdgeLeft withInset:10];
[self.recordingLabel autoVCenterInSuperview];
[self.recordingLabel autoPinEdge:ALEdgeLeft toEdge:ALEdgeRight ofView:imageView withOffset:5.f];
[cancelLabel autoVCenterInSuperview];
[cancelLabel autoHCenterInSuperview];
[self.voiceMemoUI setNeedsLayout];
[self.voiceMemoUI layoutSubviews];
// Slide in the "slide to cancel" label.
CGRect cancelLabelStartFrame = cancelLabel.frame;
CGRect cancelLabelEndFrame = cancelLabel.frame;
cancelLabelStartFrame.origin.x = self.voiceMemoUI.bounds.size.width;
cancelLabel.frame = cancelLabelStartFrame;
[UIView animateWithDuration:0.35f
delay:0.f
options:UIViewAnimationOptionCurveEaseOut
animations:^{
cancelLabel.frame = cancelLabelEndFrame;
}
completion:nil];
// Pulse the icon.
imageView.layer.opacity = 1.f;
[UIView animateWithDuration:0.5f
delay:0.2f
options:UIViewAnimationOptionRepeat | UIViewAnimationOptionAutoreverse
| UIViewAnimationOptionCurveEaseIn
animations:^{
imageView.layer.opacity = 0.f;
}
completion:nil];
// Fade in the view.
self.voiceMemoUI.layer.opacity = 0.f;
[UIView animateWithDuration:0.2f
animations:^{
self.voiceMemoUI.layer.opacity = 1.f;
}
completion:^(BOOL finished) {
if (finished) {
self.voiceMemoUI.layer.opacity = 1.f;
}
}];
self.voiceMemoUpdateTimer = [NSTimer weakScheduledTimerWithTimeInterval:0.1f
target:self
selector:@selector(updateVoiceMemo)
userInfo:nil
repeats:YES];
}
- (void)hideVoiceMemoUI:(BOOL)animated
{
OWSAssert([NSThread isMainThread]);
UIView *oldVoiceMemoUI = self.voiceMemoUI;
self.voiceMemoUI = nil;
NSTimer *voiceMemoUpdateTimer = self.voiceMemoUpdateTimer;
self.voiceMemoUpdateTimer = nil;
[oldVoiceMemoUI.layer removeAllAnimations];
if (animated) {
[UIView animateWithDuration:0.35f
animations:^{
oldVoiceMemoUI.layer.opacity = 0.f;
}
completion:^(BOOL finished) {
[oldVoiceMemoUI removeFromSuperview];
[voiceMemoUpdateTimer invalidate];
}];
} else {
[oldVoiceMemoUI removeFromSuperview];
[voiceMemoUpdateTimer invalidate];
}
}
- (void)setVoiceMemoUICancelAlpha:(CGFloat)cancelAlpha
{
OWSAssert([NSThread isMainThread]);
// Fade out the voice message views as the cancel gesture
// proceeds as feedback.
for (UIView *subview in self.voiceMemoUI.subviews) {
subview.layer.opacity = MAX(0.f, MIN(1.f, 1.f - (float)cancelAlpha));
}
}
- (void)updateVoiceMemo
{
OWSAssert([NSThread isMainThread]);
NSTimeInterval durationSeconds = fabs([self.voiceMemoStartTime timeIntervalSinceNow]);
self.recordingLabel.text = [ViewControllerUtils formatDurationSeconds:(long)round(durationSeconds)];
[self.recordingLabel sizeToFit];
}
@end
#pragma mark -
@interface MessagesViewController () <JSQMessagesComposerTextViewPasteDelegate,
OWSTextViewPasteDelegate,
OWSMessagesToolbarContentDelegate,
OWSConversationSettingsViewDelegate,
UIDocumentMenuDelegate,
UIDocumentPickerDelegate> {
@ -476,6 +808,10 @@ typedef enum : NSUInteger {
selector:@selector(resetContentAndLayout)
name:UIApplicationWillEnterForegroundNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(applicationWillResignActive:)
name:UIApplicationWillResignActiveNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(cancelReadTimer)
name:UIApplicationDidEnterBackgroundNotification
@ -491,11 +827,19 @@ typedef enum : NSUInteger {
name:UIApplicationWillEnterForegroundNotification
object:nil];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:UIApplicationDidEnterBackgroundNotification
object:nil];
name:UIApplicationWillResignActiveNotification
object:nil];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:UIApplicationDidEnterBackgroundNotification
object:nil];
}
}
- (void)applicationWillResignActive:(NSNotification *)notification
{
[self cancelVoiceMemo];
}
- (void)initializeTextView {
[self.inputToolbar.contentView.textView setFont:[UIFont ows_dynamicTypeBodyFont]];
@ -561,6 +905,8 @@ typedef enum : NSUInteger {
[self ensureBlockStateIndicator];
[self resetContentAndLayout];
[((OWSMessagesToolbarContentView *)self.inputToolbar.contentView)ensureSubviews];
}
- (void)resetContentAndLayout
@ -749,6 +1095,8 @@ typedef enum : NSUInteger {
[self cancelReadTimer];
[self saveDraft];
[self cancelVoiceMemo];
}
- (void)startExpirationTimerAnimations
@ -1007,6 +1355,7 @@ typedef enum : NSUInteger {
OWSAssert(self.inputToolbar.contentView.textView);
self.inputToolbar.contentView.textView.pasteDelegate = self;
((OWSMessagesComposerTextView *) self.inputToolbar.contentView.textView).textViewPasteDelegate = self;
((OWSMessagesToolbarContentView *)self.inputToolbar.contentView).delegate = self;
}
// Overiding JSQMVC layout defaults
@ -2704,48 +3053,143 @@ typedef enum : NSUInteger {
#pragma mark - Audio
- (void)recordAudio {
// Define the recorder setting
NSArray *pathComponents = [NSArray
arrayWithObjects:[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
[NSString stringWithFormat:@"%lld.m4a", [NSDate ows_millisecondTimeStamp]],
nil];
NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];
- (void)startRecordingVoiceMemo
{
OWSAssert([NSThread isMainThread]);
DDLogInfo(@"startRecordingVoiceMemo");
NSString *temporaryDirectory = NSTemporaryDirectory();
NSString *filename = [NSString stringWithFormat:@"%lld.m4a", [NSDate ows_millisecondTimeStamp]];
NSString *filepath = [temporaryDirectory stringByAppendingPathComponent:filename];
NSURL *fileURL = [NSURL fileURLWithPath:filepath];
// Setup audio session
AVAudioSession *session = [AVAudioSession sharedInstance];
[session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
[recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
[recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
[recordSetting setValue:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
NSError *error;
[session setCategory:AVAudioSessionCategoryRecord error:&error];
if (error) {
DDLogError(@"%@ Couldn't configure audio session: %@", self.tag, error);
[self cancelVoiceMemo];
OWSAssert(0);
return;
}
// Initiate and prepare the recorder
_audioRecorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:NULL];
_audioRecorder.delegate = self;
_audioRecorder.meteringEnabled = YES;
[_audioRecorder prepareToRecord];
}
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag {
if (flag) {
NSData *audioData = [NSData dataWithContentsOfURL:recorder.url];
SignalAttachment *attachment =
[SignalAttachment attachmentWithData:audioData dataUTI:(NSString *)kUTTypeMPEG4Audio filename:nil];
if (!attachment ||
[attachment hasError]) {
DDLogWarn(@"%@ %s Invalid attachment: %@.",
self.tag,
__PRETTY_FUNCTION__,
attachment ? [attachment errorName] : @"Missing data");
[self showErrorAlertForAttachment:attachment];
} else {
[self tryToSendAttachmentIfApproved:attachment];
}
self.audioRecorder = [[AVAudioRecorder alloc] initWithURL:fileURL
settings:@{
AVFormatIDKey : @(kAudioFormatMPEG4AAC),
AVSampleRateKey : @(44100),
AVNumberOfChannelsKey : @(2),
AVEncoderBitRateKey: @(128 * 1024),
}
error:&error];
if (error) {
DDLogError(@"%@ Couldn't create audioRecorder: %@", self.tag, error);
[self cancelVoiceMemo];
OWSAssert(0);
return;
}
self.audioRecorder.meteringEnabled = YES;
if (![self.audioRecorder prepareToRecord]) {
DDLogError(@"%@ audioRecorder couldn't prepareToRecord.", self.tag);
[self cancelVoiceMemo];
OWSAssert(0);
return;
}
if (![self.audioRecorder record]) {
DDLogError(@"%@ audioRecorder couldn't record.", self.tag);
[self cancelVoiceMemo];
OWSAssert(0);
return;
}
if (session.recordPermission != AVAudioSessionRecordPermissionGranted) {
DDLogInfo(@"%@ we do not have recording permission.", self.tag);
[self cancelVoiceMemo];
[OWSAlerts showNoMicrophonePermissionAlert];
return;
}
}
// Stops the current voice memo recording and, if it is long enough,
// sends it as an audio attachment.
//
// Recordings shorter than kMinimumRecordingTimeSeconds are discarded —
// they are almost certainly accidental taps on the voice memo button —
// and the user is shown an instructional alert instead.
- (void)endRecordingVoiceMemo
{
    OWSAssert([NSThread isMainThread]);

    DDLogInfo(@"endRecordingVoiceMemo");

    if (!self.audioRecorder) {
        DDLogError(@"%@ Missing audioRecorder", self.tag);
        OWSAssert(0);
        return;
    }

    // Snapshot the duration _before_ stopping; stopping resets currentTime.
    NSTimeInterval currentTime = self.audioRecorder.currentTime;

    [self.audioRecorder stop];

    const NSTimeInterval kMinimumRecordingTimeSeconds = 1.f;
    if (currentTime < kMinimumRecordingTimeSeconds) {
        DDLogInfo(@"Discarding voice message; too short.");
        self.audioRecorder = nil;
        [OWSAlerts
            showAlertWithTitle:
                NSLocalizedString(@"VOICE_MESSAGE_TOO_SHORT_ALERT_TITLE",
                    @"Title for the alert indicating the 'voice message' needs to be held down to record.")
                       message:NSLocalizedString(@"VOICE_MESSAGE_TOO_SHORT_ALERT_MESSAGE",
                                   @"Message for the alert indicating the 'voice message' needs to be held "
                                   @"down to record.")];
        return;
    }

    NSData *audioData = [NSData dataWithContentsOfURL:self.audioRecorder.url];

    if (!audioData) {
        DDLogError(@"%@ Couldn't load audioRecorder data", self.tag);
        OWSAssert(0);
        self.audioRecorder = nil;
        return;
    }

    // The recorder (and its file handle) is no longer needed once the
    // data is in memory.
    self.audioRecorder = nil;

    NSString *filename = [NSLocalizedString(@"VOICE_MESSAGE_FILE_NAME", @"Filename for voice messages.")
        stringByAppendingPathExtension:[MIMETypeUtil fileExtensionForUTIType:(NSString *)kUTTypeMPEG4Audio]];

    SignalAttachment *attachment =
        [SignalAttachment attachmentWithData:audioData dataUTI:(NSString *)kUTTypeMPEG4Audio filename:filename];
    if (!attachment || [attachment hasError]) {
        DDLogWarn(@"%@ %s Invalid attachment: %@.",
            self.tag,
            __PRETTY_FUNCTION__,
            attachment ? [attachment errorName] : @"Missing data");
        [self showErrorAlertForAttachment:attachment];
    } else {
        // Voice memos skip the approval dialog; the user deliberately recorded them.
        [self tryToSendAttachmentIfApproved:attachment skipApprovalDialog:YES];
    }
}
// Discards the in-progress voice memo recording without sending anything.
- (void)cancelRecordingVoiceMemo
{
    OWSAssert([NSThread isMainThread]);

    DDLogInfo(@"cancelRecordingVoiceMemo");

    [self resetRecordingVoiceMemo];
}
// Stops and tears down the recorder. Messaging a nil audioRecorder is a
// harmless no-op, so this is safe when no recording is in progress.
- (void)resetRecordingVoiceMemo
{
    OWSAssert([NSThread isMainThread]);

    [self.audioRecorder stop];
    self.audioRecorder = nil;
}
#pragma mark Accessory View
- (void)didPressAccessoryButton:(UIButton *)sender {
@ -3037,6 +3481,63 @@ typedef enum : NSUInteger {
completion:nil];
}
#pragma mark - OWSMessagesToolbarContentDelegate
// OWSMessagesToolbarContentDelegate: the user began the long-press on the
// voice memo button. Show the recording UI and start capturing audio.
- (void)voiceMemoGestureDidStart
{
    OWSAssert([NSThread isMainThread]);

    DDLogInfo(@"voiceMemoGestureDidStart");

    OWSMessagesInputToolbar *inputToolbar = (OWSMessagesInputToolbar *)self.inputToolbar;
    [inputToolbar showVoiceMemoUI];
    [self startRecordingVoiceMemo];
}
// The user released the voice memo button normally. Dismiss the recording
// UI (animated) and finish the recording, which may send it.
- (void)voiceMemoGestureDidEnd
{
    OWSAssert([NSThread isMainThread]);

    DDLogInfo(@"voiceMemoGestureDidEnd");

    OWSMessagesInputToolbar *inputToolbar = (OWSMessagesInputToolbar *)self.inputToolbar;
    [inputToolbar hideVoiceMemoUI:YES];
    [self endRecordingVoiceMemo];
}
// The user cancelled the gesture (e.g. slid to cancel). Dismiss the
// recording UI (not animated) and discard the recording.
- (void)voiceMemoGestureDidCancel
{
    OWSAssert([NSThread isMainThread]);

    DDLogInfo(@"voiceMemoGestureDidCancel");

    OWSMessagesInputToolbar *inputToolbar = (OWSMessagesInputToolbar *)self.inputToolbar;
    [inputToolbar hideVoiceMemoUI:NO];
    [self cancelRecordingVoiceMemo];
}
// The user's finger moved during the long-press; update the alpha of the
// "slide to cancel" affordance to reflect progress toward cancelling.
- (void)voiceMemoGestureDidChange:(CGFloat)cancelAlpha
{
    OWSAssert([NSThread isMainThread]);

    OWSMessagesInputToolbar *inputToolbar = (OWSMessagesInputToolbar *)self.inputToolbar;
    [inputToolbar setVoiceMemoUICancelAlpha:cancelAlpha];
}
// Programmatically aborts a voice memo (e.g. on a recorder error or a
// permissions failure): cancels any in-flight gesture, hides the recording
// UI, and discards any partial recording.
- (void)cancelVoiceMemo
{
    OWSAssert([NSThread isMainThread]);

    OWSMessagesToolbarContentView *contentView = (OWSMessagesToolbarContentView *)self.inputToolbar.contentView;
    [contentView cancelVoiceMemoIfNecessary];

    OWSMessagesInputToolbar *inputToolbar = (OWSMessagesInputToolbar *)self.inputToolbar;
    [inputToolbar hideVoiceMemoUI:NO];

    [self cancelRecordingVoiceMemo];
}
- (void)textViewDidChange:(UITextView *)textView
{
    // Override.
    //
    // We want to show the "voice message" button if the text input is empty
    // and the "send" button if it isn't.
    //
    // NOTE(review): super's implementation is deliberately not invoked here,
    // per the comment above — confirm the superclass does no other required
    // bookkeeping in this callback.
    OWSMessagesToolbarContentView *contentView = (OWSMessagesToolbarContentView *)self.inputToolbar.contentView;
    [contentView ensureEnabling];
}
#pragma mark - UIScrollViewDelegate
- (void)scrollViewWillBeginDragging:(UIScrollView *)scrollView

@ -310,7 +310,7 @@ NSString *const kSelectRecipientViewControllerCellIdentifier = @"kSelectRecipien
}
[activityAlert dismissViewControllerAnimated:NO
completion:^{
[ViewControllerUtils
[OWSAlerts
showAlertWithTitle:NSLocalizedString(@"ALERT_ERROR_TITLE",
@"Title for a generic error alert.")
message:error.localizedDescription];

@ -12,6 +12,7 @@
#import "OWSContactsManager.h"
#import "OWSTableViewController.h"
#import "SecurityUtils.h"
#import "Signal-Swift.h"
#import "SignalKeyingStorage.h"
#import "TSOutgoingMessage.h"
#import "UIUtil.h"
@ -307,7 +308,7 @@ NS_ASSUME_NONNULL_BEGIN
[weakSelf showUnblockAlertForRecipientId:recipientId];
}
} else {
[ViewControllerUtils
[OWSAlerts
showAlertWithTitle:
NSLocalizedString(@"UPDATE_GROUP_CANT_REMOVE_MEMBERS_ALERT_TITLE",
@"Title for alert indicating that group members can't be removed.")

@ -22,12 +22,4 @@
+ (NSString *)formatDurationSeconds:(long)timeSeconds;
#pragma mark - Alerts
+ (UIAlertController *)showAlertWithTitle:(NSString *)title message:(NSString *)message;
+ (UIAlertController *)showAlertWithTitle:(NSString *)title
message:(NSString *)message
buttonLabel:(NSString *)buttonLabel;
@end

@ -115,30 +115,6 @@ NS_ASSUME_NONNULL_BEGIN
}
}
#pragma mark - Alerts
// Convenience variant that uses a localized "OK" as the only button.
+ (UIAlertController *)showAlertWithTitle:(NSString *)title message:(NSString *)message
{
    NSString *okLabel = NSLocalizedString(@"OK", nil);
    return [self showAlertWithTitle:title message:message buttonLabel:okLabel];
}
// Builds a single-button alert and presents it from the frontmost view
// controller. Returns the presented controller so callers can dismiss it.
+ (UIAlertController *)showAlertWithTitle:(NSString *)title
                                  message:(NSString *)message
                              buttonLabel:(NSString *)buttonLabel
{
    OWSAssert(title.length > 0);
    OWSAssert(message.length > 0);

    UIAlertController *alertController =
        [UIAlertController alertControllerWithTitle:title message:message preferredStyle:UIAlertControllerStyleAlert];
    UIAlertAction *dismissAction =
        [UIAlertAction actionWithTitle:buttonLabel style:UIAlertActionStyleDefault handler:nil];
    [alertController addAction:dismissAction];

    UIViewController *presenter = [UIApplication sharedApplication].frontmostViewController;
    [presenter presentViewController:alertController animated:YES completion:nil];

    return alertController;
}
#pragma mark - Logging
+ (NSString *)tag

@ -54,7 +54,7 @@ import Foundation
// Here the permissions are either granted or denied
guard isGranted == true else {
Logger.warn("\(self.TAG) aborting due to missing microphone permissions.")
self.showNoMicrophonePermissionAlert()
OWSAlerts.showNoMicrophonePermissionAlert()
return
}
callUIAdapter.startAndShowOutgoingCall(recipientId: recipientId)
@ -62,20 +62,4 @@ import Foundation
}
return true
}
/// Presents an alert explaining that microphone access is required,
/// offering a shortcut into the system Settings app alongside a dismiss button.
private func showNoMicrophonePermissionAlert() {
    let title = NSLocalizedString("CALL_AUDIO_PERMISSION_TITLE", comment:"Alert title when calling and permissions for microphone are missing")
    let message = NSLocalizedString("CALL_AUDIO_PERMISSION_MESSAGE", comment:"Alert message when calling and permissions for microphone are missing")
    let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)

    let dismissTitle = NSLocalizedString("DISMISS_BUTTON_TEXT", comment: "Generic short text for button to dismiss a dialog")
    alert.addAction(UIAlertAction(title: dismissTitle, style: .cancel))

    let settingsTitle = NSLocalizedString("OPEN_SETTINGS_BUTTON", comment: "Button text which opens the settings app")
    alert.addAction(UIAlertAction(title: settingsTitle, style: .default) { _ in
        UIApplication.shared.openSystemSettings()
    })

    UIApplication.shared.frontmostViewController?.present(alert, animated: true, completion: nil)
}
}

@ -0,0 +1,37 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
import Foundation
// Shared helpers for presenting simple alerts from whatever view controller
// is currently frontmost. All entry points are class funcs; callers never
// instantiate OWSAlerts.
@objc class OWSAlerts: NSObject {
    let TAG = "[OWSAlerts]"

    /// Cleanup and present alert for no permissions.
    ///
    /// Offers a dismiss button plus a shortcut into the system Settings app
    /// so the user can grant microphone access.
    public class func showNoMicrophonePermissionAlert() {
        let alertTitle = NSLocalizedString("CALL_AUDIO_PERMISSION_TITLE", comment:"Alert title when calling and permissions for microphone are missing")
        let alertMessage = NSLocalizedString("CALL_AUDIO_PERMISSION_MESSAGE", comment:"Alert message when calling and permissions for microphone are missing")
        let alertController = UIAlertController(title: alertTitle, message: alertMessage, preferredStyle: .alert)

        let dismiss = NSLocalizedString("DISMISS_BUTTON_TEXT", comment: "Generic short text for button to dismiss a dialog")
        let dismissAction = UIAlertAction(title: dismiss, style: .cancel)

        let settingsString = NSLocalizedString("OPEN_SETTINGS_BUTTON", comment: "Button text which opens the settings app")
        let settingsAction = UIAlertAction(title: settingsString, style: .default) { _ in
            UIApplication.shared.openSystemSettings()
        }

        alertController.addAction(dismissAction)
        alertController.addAction(settingsAction)
        UIApplication.shared.frontmostViewController?.present(alertController, animated: true, completion: nil)
    }

    /// Presents a single-button alert whose button is a localized "OK".
    public class func showAlert(withTitle title: String, message: String) {
        self.showAlert(withTitle: title, message: message, buttonLabel: NSLocalizedString("OK", comment: ""))
    }

    /// Presents a single-button alert from the frontmost view controller.
    public class func showAlert(withTitle title: String, message: String, buttonLabel: String) {
        // `isEmpty` is clearer (and cheaper) than counting characters.
        assert(!title.isEmpty)
        assert(!message.isEmpty)

        let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: buttonLabel, style: .default, handler: nil))
        UIApplication.shared.frontmostViewController?.present(alert, animated: true, completion: nil)
    }
}

@ -422,7 +422,7 @@
"ERROR_DESCRIPTION_NO_INTERNET" = "Signal was unable to connect to the internet. Please try from another WiFi network or use mobile data.";
/* Error indicating that an outgoing message had no valid recipients. */
"ERROR_DESCRIPTION_NO_VALID_RECIPIENTS" = "ERROR_DESCRIPTION_NO_VALID_RECIPIENTS";
"ERROR_DESCRIPTION_NO_VALID_RECIPIENTS" = "Message send failed due to a lack of valid recipients.";
/* Error message when attempting to send message */
"ERROR_DESCRIPTION_SENDING_UNAUTHORIZED" = "Your device is no longer registered for your phone number. You must remove and reinstall Signal.";
@ -860,9 +860,6 @@
/* Label for 'Pager' phone numbers. */
"PHONE_NUMBER_TYPE_PAGER" = "Pager";
/* Label for 'Radio' phone numbers. */
"PHONE_NUMBER_TYPE_RADIO" = "Radio";
/* Label used when we don't know what kind of phone number it is (e.g. mobile/work/home). */
"PHONE_NUMBER_TYPE_UNKNOWN" = "Unknown";
@ -1280,6 +1277,18 @@
/* table cell label in conversation settings */
"VERIFY_PRIVACY" = "Verify Safety Number";
/* Indicates how to cancel a voice message. */
"VOICE_MESSAGE_CANCEL_INSTRUCTIONS" = "Slide to Cancel";
/* Filename for voice messages. */
"VOICE_MESSAGE_FILE_NAME" = "Voice Message";
/* Message for the alert indicating the 'voice message' needs to be held down to record. */
"VOICE_MESSAGE_TOO_SHORT_ALERT_MESSAGE" = "Tap and hold to record a voice message.";
/* Title for the alert indicating the 'voice message' needs to be held down to record. */
"VOICE_MESSAGE_TOO_SHORT_ALERT_TITLE" = "Voice Message";
/* Activity indicator title, shown upon returning to the device manager, until you complete the provisioning process on desktop */
"WAITING_TO_COMPLETE_DEVICE_LINK_TEXT" = "Complete setup on Signal Desktop.";

Loading…
Cancel
Save