diff --git a/.travis.yml b/.travis.yml index 6d2188ec1..cf2a2ba20 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,4 @@ language: objective-c script: - - rake test \ No newline at end of file + - bundle exec rake test \ No newline at end of file diff --git a/ATLMessageCollectionViewCellTests.m b/ATLMessageCollectionViewCellTests.m new file mode 100644 index 000000000..a9f8fd3c0 --- /dev/null +++ b/ATLMessageCollectionViewCellTests.m @@ -0,0 +1,129 @@ +// +// ATLMessageCollectionViewCellTests.m +// Atlas +// +// Created by Kabir Mahal on 8/17/15. +// Copyright (c) 2015 Layer. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +#import +#import +#import +#import "ATLMessageCollectionViewCell.h" +#import "ATLTestClasses.h" +#import "ATLTestUtilities.h" +#define EXP_SHORTHAND +#import +#import +#import "LYRMessageMock.h" +#import "LYRMessagePartMock.h" + +@interface ATLMessageCollectionViewCellTests : XCTestCase + +@end + +@implementation ATLMessageCollectionViewCellTests + +- (void)setUp { + [super setUp]; + // Put setup code here. This method is called before the invocation of each test method in the class. +} + +- (void)tearDown { + // Put teardown code here. This method is called after the invocation of each test method in the class. 
+ [super tearDown]; +} + +- (void)testThatAsynchronousGifLoadingDoesNotUpdateReusedCells +{ + ATLMessageCollectionViewCell *cell = [[ATLMessageCollectionViewCell alloc] initWithFrame:CGRectZero]; + ATLMessageBubbleView *bubbleView = cell.bubbleView; + id partialMock = OCMPartialMock(bubbleView); + [[[partialMock reject] ignoringNonObjectArgs] updateWithImage:[OCMArg any] width:1337]; + + NSBundle *parentBundle = [NSBundle bundleForClass:[self class]]; + NSURL *url = [parentBundle URLForResource:@"boatgif" withExtension:@"gif"]; + NSData *data = [NSData dataWithContentsOfURL:url]; + UIImage *gif = [UIImage imageWithData:data]; + + LYRMessagePartMock *part1 = [LYRMessagePartMock messagePartWithMIMEType:ATLMIMETypeImageGIF data:data]; + data = part1.data; + NSDictionary *imageMetadata = @{ @"width": @(gif.size.width), + @"height": @(gif.size.height), + @"orientation": @(gif.imageOrientation) }; + NSData *JSONData = [NSJSONSerialization dataWithJSONObject:imageMetadata options:NSJSONWritingPrettyPrinted error:nil]; + LYRMessagePartMock *part2 = [LYRMessagePartMock messagePartWithMIMEType:ATLMIMETypeImageSize data:JSONData]; + LYRMessageMock *messageMock1 = [LYRMessageMock newMessageWithParts:@[ part1, part2 ] senderID:[ATLUserMock userWithMockUserName:ATLMockUserNameKlemen].participantIdentifier]; + LYRMessageMock *messageMock2 = [LYRMessageMock newMessageWithParts:@[ [LYRMessagePartMock messagePartWithMIMEType:@"text/plain" data:[@"test" dataUsingEncoding:NSUTF8StringEncoding]] ] senderID:[ATLUserMock userWithMockUserName:ATLMockUserNameKlemen].participantIdentifier]; + + dispatch_semaphore_t semaphore = dispatch_semaphore_create(2); + + id partialmockedPart = OCMPartialMock(part1); + [[partialmockedPart expect] andForwardToRealObject]; + [[partialmockedPart expect] andForwardToRealObject]; + [[partialmockedPart expect] andForwardToRealObject]; + [[partialmockedPart expect] andDo:^(NSInvocation *invocation) { + dispatch_semaphore_wait(semaphore, 
DISPATCH_TIME_FOREVER); + [invocation setReturnValue:(__bridge void *)(data)]; + }]; + [cell presentMessage:(LYRMessage *)messageMock1]; + [cell prepareForReuse]; + [cell presentMessage:(LYRMessage *)messageMock2]; + + dispatch_semaphore_signal(semaphore); + + [partialMock verifyWithDelay:2.0f]; +} + +- (void)testThatAsynchronousImageLoadingDoesNotUpdateReusedCells +{ + ATLMessageCollectionViewCell *cell = [[ATLMessageCollectionViewCell alloc] initWithFrame:CGRectZero]; + ATLMessageBubbleView *bubbleView = cell.bubbleView; + id partialMock = OCMPartialMock(bubbleView); + [[[partialMock reject] ignoringNonObjectArgs] updateWithImage:[OCMArg any] width:1337]; + + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(800, 800)); + NSData *data = UIImageJPEGRepresentation(image, 1.0f); + + LYRMessagePartMock *part1 = [LYRMessagePartMock messagePartWithMIMEType:ATLMIMETypeImageJPEG data:data]; + data = part1.data; + NSDictionary *imageMetadata = @{ @"width": @(image.size.width), + @"height": @(image.size.height), + @"orientation": @(image.imageOrientation) }; + NSData *JSONData = [NSJSONSerialization dataWithJSONObject:imageMetadata options:NSJSONWritingPrettyPrinted error:nil]; + LYRMessagePartMock *part2 = [LYRMessagePartMock messagePartWithMIMEType:ATLMIMETypeImageSize data:JSONData]; + LYRMessageMock *messageMock1 = [LYRMessageMock newMessageWithParts:@[ part1, part2 ] senderID:[ATLUserMock userWithMockUserName:ATLMockUserNameKlemen].participantIdentifier]; + LYRMessageMock *messageMock2 = [LYRMessageMock newMessageWithParts:@[ [LYRMessagePartMock messagePartWithMIMEType:@"text/plain" data:[@"test" dataUsingEncoding:NSUTF8StringEncoding]] ] senderID:[ATLUserMock userWithMockUserName:ATLMockUserNameKlemen].participantIdentifier]; + + dispatch_semaphore_t semaphore = dispatch_semaphore_create(2); + + id partialmockedPart = OCMPartialMock(part1); + [[partialmockedPart expect] andForwardToRealObject]; + [[partialmockedPart expect] andForwardToRealObject]; + 
[[partialmockedPart expect] andForwardToRealObject]; + [[partialmockedPart expect] andDo:^(NSInvocation *invocation) { + dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER); + [invocation setReturnValue:(__bridge void *)(data)]; + }]; + [cell presentMessage:(LYRMessage *)messageMock1]; + [cell prepareForReuse]; + [cell presentMessage:(LYRMessage *)messageMock2]; + + dispatch_semaphore_signal(semaphore); + + [partialMock verifyWithDelay:2.0f]; +} + +@end diff --git a/Atlas.podspec b/Atlas.podspec index e79d61eb8..b74a2e312 100644 --- a/Atlas.podspec +++ b/Atlas.podspec @@ -1,6 +1,6 @@ Pod::Spec.new do |s| s.name = "Atlas" - s.version = '1.0.0' + s.version = '1.0.11' s.summary = "Atlas is a library of communications user interface components integrated with LayerKit." s.homepage = 'https://atlas.layer.com/' s.social_media_url = 'http://twitter.com/layer' @@ -18,5 +18,5 @@ Pod::Spec.new do |s| s.header_mappings_dir = 'Code' s.ios.frameworks = %w{UIKit CoreLocation MobileCoreServices} s.ios.deployment_target = '7.0' - s.dependency 'LayerKit' + s.dependency 'LayerKit', '>= 0.16.0' end diff --git a/Atlas.xcodeproj/project.pbxproj b/Atlas.xcodeproj/project.pbxproj index 9110179cf..e2970a342 100644 --- a/Atlas.xcodeproj/project.pbxproj +++ b/Atlas.xcodeproj/project.pbxproj @@ -9,9 +9,18 @@ /* Begin PBXBuildFile section */ 2B6445637A4A449ABC5750C4 /* libPods-StoryboardTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = DFFB13F66B57103A728CA624 /* libPods-StoryboardTests.a */; }; 377EEF73853F61FE5B5568A2 /* libPods-Storyboard.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 12E6C660682C89FC093228B5 /* libPods-Storyboard.a */; }; + 5A86A2D11B82BB11005AF74B /* ATLMessageCollectionViewCellTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 5A86A2D01B82BB11005AF74B /* ATLMessageCollectionViewCellTests.m */; }; + 5A86A2D21B82BB11005AF74B /* ATLMessageCollectionViewCellTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 5A86A2D01B82BB11005AF74B /* 
ATLMessageCollectionViewCellTests.m */; }; + 5AEFC5CB1B79834300ADE4EC /* boatgif.gif in Resources */ = {isa = PBXBuildFile; fileRef = 5AEFC5CA1B79834300ADE4EC /* boatgif.gif */; }; + 5AEFC5CC1B79834300ADE4EC /* boatgif.gif in Resources */ = {isa = PBXBuildFile; fileRef = 5AEFC5CA1B79834300ADE4EC /* boatgif.gif */; }; + 5AF32FD31AA59C39005C13D7 /* ATLUserMockTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 5AF32FD21AA59C39005C13D7 /* ATLUserMockTest.m */; }; CE7BDC080170509511B89900 /* libPods-UnitTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = F7012754989ED85BE6A8D69E /* libPods-UnitTests.a */; }; D0294DE71A93F37A00702856 /* Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = D0294DE61A93F37A00702856 /* Info.plist */; }; D0294DE81A93F39300702856 /* ATLMediaStreamTests.m in Sources */ = {isa = PBXBuildFile; fileRef = D0294DE41A93F37000702856 /* ATLMediaStreamTests.m */; }; + D02CDCDB1A9FFC930013CBE8 /* ATLTestUtilities.m in Sources */ = {isa = PBXBuildFile; fileRef = D02CDCDA1A9FFC930013CBE8 /* ATLTestUtilities.m */; }; + D02CDCDC1A9FFC930013CBE8 /* ATLTestUtilities.m in Sources */ = {isa = PBXBuildFile; fileRef = D02CDCDA1A9FFC930013CBE8 /* ATLTestUtilities.m */; }; + D02CDCDD1A9FFC930013CBE8 /* ATLTestUtilities.m in Sources */ = {isa = PBXBuildFile; fileRef = D02CDCDA1A9FFC930013CBE8 /* ATLTestUtilities.m */; }; + D04517141A9F9A0C00E137D9 /* ATLMediaAttachmentTests.m in Sources */ = {isa = PBXBuildFile; fileRef = D04517131A9F9A0C00E137D9 /* ATLMediaAttachmentTests.m */; }; D2575596FE7E9432B48A0D74 /* libPods-ProgrammaticTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = D82E912F9EB0D3AB8BD452A2 /* libPods-ProgrammaticTests.a */; }; D63C68E11A94250000D235D5 /* test-logo.png in Resources */ = {isa = PBXBuildFile; fileRef = D63C68DF1A94250000D235D5 /* test-logo.png */; }; D6A122B51A9D2CA2004F67BE /* ATLTestClasses.m in Sources */ = {isa = PBXBuildFile; fileRef = D6F4060A1A8FDEEB00AE9581 /* ATLTestClasses.m */; }; @@ -55,7 +64,6 @@ 
D6F405E71A8FDE3900AE9581 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = D6F405E21A8FDE3900AE9581 /* Images.xcassets */; }; D6F405E91A8FDE3900AE9581 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = D6F405E41A8FDE3900AE9581 /* main.m */; }; D6F406261A8FDF5900AE9581 /* StoryboardAppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = D6F405D61A8FDE1B00AE9581 /* StoryboardAppDelegate.m */; }; - D6F4062B1A8FE20D00AE9581 /* StoryboardTests-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = D6F406101A8FDEEB00AE9581 /* StoryboardTests-Info.plist */; }; D6F4062C1A8FE28000AE9581 /* ATLConversationListViewControllerTest.m in Sources */ = {isa = PBXBuildFile; fileRef = D6F406011A8FDEEB00AE9581 /* ATLConversationListViewControllerTest.m */; }; D6F4062D1A8FE28000AE9581 /* ATLConversationTableViewCellTest.m in Sources */ = {isa = PBXBuildFile; fileRef = D6F406021A8FDEEB00AE9581 /* ATLConversationTableViewCellTest.m */; }; D6F4062E1A8FE28000AE9581 /* ATLConversationViewControllerTest.m in Sources */ = {isa = PBXBuildFile; fileRef = D6F406031A8FDEEB00AE9581 /* ATLConversationViewControllerTest.m */; }; @@ -107,6 +115,9 @@ 358D1AE37F6A67F9CE5ADC47 /* libPods-Unit Tests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Unit Tests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 4A1A0BE9E193CC265341461A /* Pods-Unit Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Unit Tests.release.xcconfig"; path = "Pods/Target Support Files/Pods-Unit Tests/Pods-Unit Tests.release.xcconfig"; sourceTree = ""; }; 5A30E6805E705AAD798FE808 /* Pods-Programmatic.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Programmatic.release.xcconfig"; path = "Pods/Target Support Files/Pods-Programmatic/Pods-Programmatic.release.xcconfig"; sourceTree = ""; }; + 5A86A2D01B82BB11005AF74B /* 
ATLMessageCollectionViewCellTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ATLMessageCollectionViewCellTests.m; sourceTree = ""; }; + 5AEFC5CA1B79834300ADE4EC /* boatgif.gif */ = {isa = PBXFileReference; lastKnownFileType = image.gif; path = boatgif.gif; sourceTree = ""; }; + 5AF32FD21AA59C39005C13D7 /* ATLUserMockTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ATLUserMockTest.m; sourceTree = ""; }; 5C9A8B1181386C002F1F0585 /* Pods-Conversation.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Conversation.release.xcconfig"; path = "Pods/Target Support Files/Pods-Conversation/Pods-Conversation.release.xcconfig"; sourceTree = ""; }; 6334472F1BD9D5C3E110141E /* Pods-UnitTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-UnitTests.debug.xcconfig"; path = "Pods/Target Support Files/Pods-UnitTests/Pods-UnitTests.debug.xcconfig"; sourceTree = ""; }; 866487157645D4273D55AEF8 /* Pods-ProgrammaticTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ProgrammaticTests.release.xcconfig"; path = "Pods/Target Support Files/Pods-ProgrammaticTests/Pods-ProgrammaticTests.release.xcconfig"; sourceTree = ""; }; @@ -115,6 +126,9 @@ D0294DD81A93F33900702856 /* UnitTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = UnitTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; D0294DE41A93F37000702856 /* ATLMediaStreamTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = ATLMediaStreamTests.m; path = Tests/ATLMediaStreamTests.m; sourceTree = ""; }; D0294DE61A93F37A00702856 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; 
path = Tests/Info.plist; sourceTree = ""; }; + D02CDCD91A9FFC930013CBE8 /* ATLTestUtilities.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ATLTestUtilities.h; sourceTree = ""; }; + D02CDCDA1A9FFC930013CBE8 /* ATLTestUtilities.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ATLTestUtilities.m; sourceTree = ""; }; + D04517131A9F9A0C00E137D9 /* ATLMediaAttachmentTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = ATLMediaAttachmentTests.m; path = Tests/ATLMediaAttachmentTests.m; sourceTree = ""; }; D63C68DF1A94250000D235D5 /* test-logo.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "test-logo.png"; sourceTree = ""; }; D6C045431A9914080028E9D0 /* ProgrammaticAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ProgrammaticAppDelegate.h; path = Examples/Programmatic/ProgrammaticAppDelegate.h; sourceTree = SOURCE_ROOT; }; D6C045441A9914080028E9D0 /* ProgrammaticAppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = ProgrammaticAppDelegate.m; path = Examples/Programmatic/ProgrammaticAppDelegate.m; sourceTree = SOURCE_ROOT; }; @@ -149,7 +163,6 @@ D6F405D41A8FDE1B00AE9581 /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = main.m; path = Examples/Storyboard/main.m; sourceTree = SOURCE_ROOT; }; D6F405D51A8FDE1B00AE9581 /* StoryboardAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = StoryboardAppDelegate.h; path = Examples/Storyboard/StoryboardAppDelegate.h; sourceTree = SOURCE_ROOT; }; D6F405D61A8FDE1B00AE9581 /* StoryboardAppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = StoryboardAppDelegate.m; path = 
Examples/Storyboard/StoryboardAppDelegate.m; sourceTree = SOURCE_ROOT; }; - D6F405DE1A8FDE3900AE9581 /* .DS_Store */ = {isa = PBXFileReference; lastKnownFileType = text; name = .DS_Store; path = Examples/Programmatic/.DS_Store; sourceTree = SOURCE_ROOT; }; D6F405E11A8FDE3900AE9581 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = LaunchScreen.xib; sourceTree = ""; }; D6F405E21A8FDE3900AE9581 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Images.xcassets; path = Examples/Programmatic/Images.xcassets; sourceTree = SOURCE_ROOT; }; D6F405E31A8FDE3900AE9581 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = Examples/Programmatic/Info.plist; sourceTree = SOURCE_ROOT; }; @@ -263,7 +276,9 @@ D0294DE31A93F36100702856 /* Unit Tests */ = { isa = PBXGroup; children = ( + 5A86A2D01B82BB11005AF74B /* ATLMessageCollectionViewCellTests.m */, D0294DE41A93F37000702856 /* ATLMediaStreamTests.m */, + D04517131A9F9A0C00E137D9 /* ATLMediaAttachmentTests.m */, D0294DE61A93F37A00702856 /* Info.plist */, ); name = "Unit Tests"; @@ -361,6 +376,7 @@ isa = PBXGroup; children = ( D6F4060D1A8FDEEB00AE9581 /* LYRClientMockTests.m */, + 5AF32FD21AA59C39005C13D7 /* ATLUserMockTest.m */, ); name = Mocks; sourceTree = ""; @@ -372,6 +388,8 @@ D6F4060A1A8FDEEB00AE9581 /* ATLTestClasses.m */, D6F4060B1A8FDEEB00AE9581 /* ATLTestInterface.h */, D6F4060C1A8FDEEB00AE9581 /* ATLTestInterface.m */, + D02CDCD91A9FFC930013CBE8 /* ATLTestUtilities.h */, + D02CDCDA1A9FFC930013CBE8 /* ATLTestUtilities.m */, ); name = Utilities; sourceTree = ""; @@ -410,6 +428,7 @@ D6F4060E1A8FDEEB00AE9581 /* Resources */ = { isa = PBXGroup; children = ( + 5AEFC5CA1B79834300ADE4EC /* boatgif.gif */, D63C68DF1A94250000D235D5 /* test-logo.png */, D6F4060F1A8FDEEB00AE9581 /* ProgrammaticTests-Info.plist */, D6F406101A8FDEEB00AE9581 /* StoryboardTests-Info.plist */, @@ -430,7 
+449,6 @@ children = ( D6C045431A9914080028E9D0 /* ProgrammaticAppDelegate.h */, D6C045441A9914080028E9D0 /* ProgrammaticAppDelegate.m */, - D6F405DE1A8FDE3900AE9581 /* .DS_Store */, D6F405DF1A8FDE3900AE9581 /* Base.lproj */, D6F405E21A8FDE3900AE9581 /* Images.xcassets */, D6F405E31A8FDE3900AE9581 /* Info.plist */, @@ -626,6 +644,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( + 5AEFC5CB1B79834300ADE4EC /* boatgif.gif in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -641,8 +660,8 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - D6F4062B1A8FE20D00AE9581 /* StoryboardTests-Info.plist in Resources */, D63C68E11A94250000D235D5 /* test-logo.png in Resources */, + 5AEFC5CC1B79834300ADE4EC /* boatgif.gif in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -806,7 +825,9 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + D02CDCDD1A9FFC930013CBE8 /* ATLTestUtilities.m in Sources */, D0294DE81A93F39300702856 /* ATLMediaStreamTests.m in Sources */, + D04517141A9F9A0C00E137D9 /* ATLMediaAttachmentTests.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -820,6 +841,7 @@ D6F405CA1A8FDDE300AE9581 /* LYRQueryControllerMock.m in Sources */, D6F405E91A8FDE3900AE9581 /* main.m in Sources */, D6F405C51A8FDDE300AE9581 /* LYRClientMock.m in Sources */, + D02CDCDB1A9FFC930013CBE8 /* ATLTestUtilities.m in Sources */, D6F405B01A8FDDAA00AE9581 /* ATLSampleConversationListViewController.m in Sources */, D6F405C81A8FDDE300AE9581 /* LYRMessagePartMock.m in Sources */, D6F405B21A8FDDAA00AE9581 /* ATLSampleParticipantTableViewController.m in Sources */, @@ -840,9 +862,11 @@ D6F4062C1A8FE28000AE9581 /* ATLConversationListViewControllerTest.m in Sources */, D6F4062D1A8FE28000AE9581 /* ATLConversationTableViewCellTest.m in Sources */, D6F4062E1A8FE28000AE9581 /* ATLConversationViewControllerTest.m in Sources */, + 5AF32FD31AA59C39005C13D7 /* ATLUserMockTest.m in Sources */, 
D6F4062F1A8FE28000AE9581 /* ATLMessageCollectionViewCellTest.m in Sources */, D6F406301A8FE28000AE9581 /* ATLMessageInputBarTest.m in Sources */, D6C0454F1A9953C70028E9D0 /* ALTAddressBarControllerTest.m in Sources */, + 5A86A2D11B82BB11005AF74B /* ATLMessageCollectionViewCellTests.m in Sources */, D6F406321A8FE28000AE9581 /* ATLParticipantTableViewControllerTest.m in Sources */, D6F406331A8FE28000AE9581 /* ATLParticipantTableViewCellTest.m in Sources */, ); @@ -858,6 +882,7 @@ D6D48F871A9D140F00C8872E /* LYRMessageMock.m in Sources */, D6D48F881A9D140F00C8872E /* LYRMessagePartMock.m in Sources */, D6D48F891A9D140F00C8872E /* LYRMockContentStore.m in Sources */, + D02CDCDC1A9FFC930013CBE8 /* ATLTestUtilities.m in Sources */, D6D48F8A1A9D140F00C8872E /* LYRQueryControllerMock.m in Sources */, D6D48F8B1A9D140F00C8872E /* ATLUserMock.m in Sources */, D6D48F8C1A9D140F00C8872E /* ATLSampleConversationAvatarItem.m in Sources */, @@ -882,6 +907,7 @@ D6A122BC1A9D2CA2004F67BE /* ATLConversationViewControllerTest.m in Sources */, D6A122BD1A9D2CA2004F67BE /* ATLMessageInputBarTest.m in Sources */, D6A122BE1A9D2CA2004F67BE /* ATLParticipantTableViewControllerTest.m in Sources */, + 5A86A2D21B82BB11005AF74B /* ATLMessageCollectionViewCellTests.m in Sources */, D6A122BF1A9D2CA2004F67BE /* ALTAddressBarControllerTest.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -1040,6 +1066,7 @@ buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; @@ -1086,6 +1113,7 @@ buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; diff --git 
a/Atlas.xcodeproj/xcshareddata/xcschemes/ProgrammaticTests.xcscheme b/Atlas.xcodeproj/xcshareddata/xcschemes/ProgrammaticTests.xcscheme new file mode 100644 index 000000000..72128c367 --- /dev/null +++ b/Atlas.xcodeproj/xcshareddata/xcschemes/ProgrammaticTests.xcscheme @@ -0,0 +1,134 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e1db7925..5869df193 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,110 @@ # Atlas Changelog +## 1.0.11 + +### Public API Changes + +* Implemented `conversationViewController:configureCell:forMessage:` to allow `ATLConversationViewController` subclasses to add extra cell configuration. +* Added `shouldDisplayAvatarItemForAuthenticatedUser` to `ATLConversationViewController` to display avatar items for the authenticated user. +* Added `ATLAvatarItemDisplayFrequency` property to `ATLConversationViewController` to allow customization of avatar frequency. +* Exposed `LYRQueryController` on `ATLConversationViewController`. +* Added `NSTextCheckingType` on `ATLMessageBubbleView`. +* Added `menuControllerActions` on `ATLMessageBubbleView` to customize UIMenuController actions. + +### Enhancements + +* `ATLConversationViewController` caches unsent media attachments in the `ATLMessageInputToolbar` upon deallocation, and re-inserts them on creation. +* Added Localization support. +* Asynchronous image and GIF loading in `ATLMessageCollectionViewCell`. + +### Bug Fixes + +* Fixed bug that caused avatar images to flicker when loading photos from remote URLs. +* Fixed bug that caused UIMenuController to stay on screen during pan gesture. +* Fixed bug that caused images to stretch if smaller than the minimum cell size. + +## 1.0.10 + +### Bug Fixes + +* Fixed bug introduced in 1.0.9 relating to media attachment text color for attributed string. 
+ +## 1.0.9 + +### Public API Changes + +* Exposed private initializers of `ATLConversationViewController` and `ATLConversationListViewController` to allow subclassing for custom initialization. + +### Bug Fixes + +* Removed compiler warnings that showed from direct installation due to deprecations. + +## 1.0.8 + +### Enhancements + +* Updated change notification handling code due to LayerKit library upgrade to v0.13.3, which has some breaking changes in change notifications dictionary. + +## 1.0.7 + +### Public API Changes + +* Implemented `conversationListViewController:configurationForDefaultQuery:` to provide for query customization in the `ATLConversationListViewController`. +* Implemented `conversationViewController:configurationForDefaultQuery:` to provide for query customization in the `ATLConversationViewController`. + +## 1.0.6 + +### Bug Fixes + +* Removed all compiler warnings. + +## 1.0.5 + +### Public API Changes + +* Added `avatarImageURL` property to `ATLAvatarItem`. + +### Enhancements + +* Added logic to fetch image from a URL to `ATLAvatarImageView`. +* Added image cache to `ATLAvatarImageView`. + +### Bug Fixes + +* Fixed bug which caused `ATLConversationViewController` animation assertions when attempting to reload cells via the public API. +* Fixed bug which prevented cell font customizations from being applied. + +## 1.0.4 + +### Public API Changes + +* Moved `searchController` property to public API on `ATLConversationListViewController`. +* Moved `UIImagePickerControllerDelegate` and `UINavigationControllerDelegate` declarations to header of `ATLConversationViewController`. +* Added `leftAccessoryImage`, `rightAccessoryImage` and `displaysRightAccessoryImage` to `ATLMessageInputToolbar`. + +## 1.0.3 + +### Enhancements + +* Introduced new functionality to facilitate reloading content after asynchronous reloads of user information.
+ +### Public API Changes + +* Added `reloadCellsForMessagesSentByParticipantWithIdentitifier:` to `ATLConversationViewController`. +* Removed `collectionViewCellForMessage:` from `ATLConversationViewController`. + +## 1.0.2 + +### Public API Changes + +* Added `conversationListViewController:textForButtonWithDeletionMode:` to `ATLConversationListViewController` +* Added `conversationListViewController:colorForButtonWithDeletionMode:` to `ATLConversationListViewController` + +## 1.0.1 + +* Updated LayerKit dependency to v0.10.3 +* Now compatible with CocoaPods >= 0.36.0 + ## 1.0.0 * Initial public release of Atlas. diff --git a/Code/Atlas.m b/Code/Atlas.m index e7ef650e3..916797076 100644 --- a/Code/Atlas.m +++ b/Code/Atlas.m @@ -20,4 +20,4 @@ #import "Atlas.h" -NSString *const ATLVersionString = @"1.0.0"; +NSString *const ATLVersionString = @"1.0.11"; diff --git a/Code/Controllers/ATLAddressBarViewController.h b/Code/Controllers/ATLAddressBarViewController.h index 77a634250..6da0b5b84 100644 --- a/Code/Controllers/ATLAddressBarViewController.h +++ b/Code/Controllers/ATLAddressBarViewController.h @@ -105,17 +105,35 @@ */ @property (nonatomic) ATLAddressBarView *addressBarView; +///------------------------------------ +/// @name Managing Participant Selection +///------------------------------------ + /** - @abstract The `NSOrderedSet` of currently selected participants. + @abstract An ordered set of the currently selected participants. */ @property (nonatomic) NSOrderedSet *selectedParticipants; /** @abstract Informs the receiver that a selection occurred outside of the controller and a participant should be added to the address bar. + @param participant The participant to select. */ - (void)selectParticipant:(id)participant; +///------------------------- +/// @name Reloading the View +///------------------------- + /** + @abstract Tells the receiver to reload the view with the latest details of the participants.
+ */ +- (void)reloadView; + +///---------------------- +/// @name Disabling Input +///---------------------- + /** @abstract Disables user input and searching. */ diff --git a/Code/Controllers/ATLAddressBarViewController.m b/Code/Controllers/ATLAddressBarViewController.m index b887d6865..5cc0c4305 100644 --- a/Code/Controllers/ATLAddressBarViewController.m +++ b/Code/Controllers/ATLAddressBarViewController.m @@ -21,10 +21,10 @@ #import "ATLAddressBarViewController.h" #import "ATLConstants.h" #import "ATLAddressBarContainerView.h" +#import "ATLMessagingUtilities.h" @interface ATLAddressBarViewController () -@property (nonatomic) ATLAddressBarContainerView *view; @property (nonatomic) UITableView *tableView; @property (nonatomic) NSArray *participants; @property (nonatomic, getter=isDisabled) BOOL disabled; @@ -130,6 +130,11 @@ - (void)setSelectedParticipants:(NSOrderedSet *)selectedParticipants [self searchEnded]; } +- (void)reloadView +{ + [self.tableView reloadData]; +} + #pragma mark - UITableViewDataSource - (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView @@ -144,7 +149,7 @@ - (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath { - UITableViewCell *cell =[tableView dequeueReusableCellWithIdentifier:ATLMParticpantCellIdentifier]; + UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:ATLMParticpantCellIdentifier]; id participant = self.participants[indexPath.row]; cell.textLabel.text = participant.fullName; cell.textLabel.font = ATLMediumFont(16); @@ -482,8 +487,8 @@ - (NSString *)disabledStringForParticipants:(NSOrderedSet *)participants - (NSString *)otherStringWithRemainingParticipants:(NSUInteger)remainingParticipants { - NSString *othersString = (remainingParticipants > 1) ? 
@"others" : @"other"; - return [NSString stringWithFormat:@"and %lu %@", (unsigned long)remainingParticipants, othersString]; + NSString *othersString = (remainingParticipants > 1) ? ATLLocalizedString(@"atl.addressbar.others.key", @"other", nil) : ATLLocalizedString(@"atl.addressbar.other.key", @"other", nil); + return [NSString stringWithFormat:@"%@ %lu %@", ATLLocalizedString(@"atl.addressbar.and.key", @"and", nil), (unsigned long)remainingParticipants, othersString]; } - (BOOL)textViewHasSpaceForParticipantString:(NSString *)participantString diff --git a/Code/Controllers/ATLBaseConversationViewController.m b/Code/Controllers/ATLBaseConversationViewController.m index c044b5acf..a9210dc5f 100644 --- a/Code/Controllers/ATLBaseConversationViewController.m +++ b/Code/Controllers/ATLBaseConversationViewController.m @@ -33,6 +33,8 @@ @interface ATLBaseConversationViewController () @implementation ATLBaseConversationViewController +@dynamic view; + static CGFloat const ATLTypingIndicatorHeight = 20; static CGFloat const ATLMaxScrollDistanceFromBottom = 150; @@ -59,7 +61,6 @@ - (void)baseCommonInit _displaysAddressBar = NO; _typingParticipantIDs = [NSMutableArray new]; _firstAppearance = YES; - _displaysAddressBar = YES; } - (void)loadView @@ -73,9 +74,12 @@ - (void)viewDidLoad // Add message input tool bar self.messageInputToolbar = [ATLMessageInputToolbar new]; + // Fixes an ios9 bug that causes the background of the input accessory view to be black when being presented on screen. + self.messageInputToolbar.translucent = NO; // An apparent system bug causes a view controller to not be deallocated // if the view controller's own inputAccessoryView property is used. 
self.view.inputAccessoryView = self.messageInputToolbar; + self.messageInputToolbar.containerViewController = self; // Add typing indicator self.typingIndicatorController = [[ATLTypingIndicatorViewController alloc] init]; @@ -104,12 +108,17 @@ - (void)viewWillAppear:(BOOL)animated [self.view becomeFirstResponder]; } if (self.addressBarController && self.firstAppearance) { - [self.addressBarController.view layoutIfNeeded]; [self updateTopCollectionViewInset]; } [self updateBottomCollectionViewInset]; } +- (void)viewDidAppear:(BOOL)animated +{ + [super viewDidAppear:animated]; + self.messageInputToolbar.translucent = YES; +} + - (void)viewDidLayoutSubviews { [super viewDidLayoutSubviews]; @@ -128,11 +137,11 @@ - (void)viewDidLayoutSubviews } } - - (void)viewWillDisappear:(BOOL)animated { [super viewWillDisappear:animated]; + self.messageInputToolbar.translucent = NO; // Workaround for view's content flashing onscreen after pop animation concludes on iOS 8. BOOL isPopping = ![self.navigationController.viewControllers containsObject:self]; if (isPopping) { @@ -179,11 +188,16 @@ - (void)scrollToBottomAnimated:(BOOL)animated [self.collectionView setContentOffset:[self bottomOffsetForContentSize:contentSize] animated:animated]; } +#pragma mark - Content Inset Management + - (void)updateTopCollectionViewInset { + [self.addressBarController.view layoutIfNeeded]; + UIEdgeInsets contentInset = self.collectionView.contentInset; UIEdgeInsets scrollIndicatorInsets = self.collectionView.scrollIndicatorInsets; CGRect frame = [self.view convertRect:self.addressBarController.addressBarView.frame fromView:self.addressBarController.addressBarView.superview]; + contentInset.top = CGRectGetMaxY(frame); scrollIndicatorInsets.top = contentInset.top; self.collectionView.contentInset = contentInset; @@ -200,9 +214,6 @@ - (void)updateBottomCollectionViewInset insets.bottom = keyboardHeight + self.typingIndicatorInset; self.collectionView.scrollIndicatorInsets = insets; 
self.collectionView.contentInset = insets; - if ([self shouldScrollToBottom]) { - [self scrollToBottomAnimated:YES]; - } self.typingIndicatorViewBottomConstraint.constant = -keyboardHeight; } @@ -210,6 +221,9 @@ - (void)updateBottomCollectionViewInset - (void)keyboardWillShow:(NSNotification *)notification { + if ([[self navigationController] modalPresentationStyle] == UIModalPresentationPopover) { + return; + } [self configureWithKeyboardNotification:notification]; } @@ -227,20 +241,16 @@ - (void)messageInputToolbarDidChangeHeight:(NSNotification *)notification return; } - CGPoint existingOffset = self.collectionView.contentOffset; - CGPoint bottomOffset = [self bottomOffsetForContentSize:self.collectionView.contentSize]; - CGFloat distanceToBottom = bottomOffset.y - existingOffset.y; - BOOL shouldScrollToBottom = distanceToBottom <= ATLMaxScrollDistanceFromBottom; - CGRect toolbarFrame = [self.view convertRect:self.messageInputToolbar.frame fromView:self.messageInputToolbar.superview]; CGFloat keyboardOnscreenHeight = CGRectGetHeight(self.view.frame) - CGRectGetMinY(toolbarFrame); if (keyboardOnscreenHeight == self.keyboardHeight) return; + + BOOL messagebarDidGrow = keyboardOnscreenHeight > self.keyboardHeight; self.keyboardHeight = keyboardOnscreenHeight; + self.typingIndicatorViewBottomConstraint.constant = -self.collectionView.scrollIndicatorInsets.bottom; [self updateBottomCollectionViewInset]; - self.typingIndicatorViewBottomConstraint.constant = -self.collectionView.scrollIndicatorInsets.bottom; - if (shouldScrollToBottom) { - self.collectionView.contentOffset = existingOffset; + if ([self shouldScrollToBottom] && messagebarDidGrow) { [self scrollToBottomAnimated:YES]; } } @@ -323,7 +333,7 @@ - (void)configureCollectionViewLayoutConstraints - (void)configureTypingIndicatorLayoutConstraints { - // Typing Indicatr + // Typing Indicator [self.view addConstraint:[NSLayoutConstraint constraintWithItem:self.typingIndicatorController.view 
attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeLeft multiplier:1.0 constant:0]]; [self.view addConstraint:[NSLayoutConstraint constraintWithItem:self.typingIndicatorController.view attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeWidth multiplier:1.0 constant:0]]; [self.view addConstraint:[NSLayoutConstraint constraintWithItem:self.typingIndicatorController.view attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:ATLTypingIndicatorHeight]]; diff --git a/Code/Controllers/ATLConversationListViewController.h b/Code/Controllers/ATLConversationListViewController.h index 52443fa12..b5d6cc068 100644 --- a/Code/Controllers/ATLConversationListViewController.h +++ b/Code/Controllers/ATLConversationListViewController.h @@ -92,6 +92,57 @@ */ - (id)conversationListViewController:(ATLConversationListViewController *)conversationListViewController avatarItemForConversation:(LYRConversation *)conversation; +/** + @abstract Asks the data source for the table view cell reuse identifier for a conversation. + @param conversationListViewController The `ATLConversationListViewController` requesting the string. + @return A string that will be used to dequeue a cell from the table view. + @discussion Applications that wish to use prototype cells from a UIStoryboard in the ATLConversationListViewController cannot register their cells programmatically. + The cell must be given a reuse identifier in the UIStoryboard and that string needs to be passed into the ATLConversationListViewController so it can properly dequeue a + reuseable cell. If 'nil' is returned, the table view will default to internal values for reuse identifiers. 
+ */ +- (NSString *)reuseIdentifierForConversationListViewController:(ATLConversationListViewController *)conversationListViewController; + +/** + @abstract Asks the data source for a string to display on the delete button for a given deletion mode. + @param conversationListViewController The `LYRConversationListViewController` in which the button title will appear. + @param deletionMode The `LYRDeletionMode` for which a button has to be displayed. + @return The string to be displayed on the delete button for a given deletion mode in the conversation list. + */ +- (NSString *)conversationListViewController:(ATLConversationListViewController *)conversationListViewController textForButtonWithDeletionMode:(LYRDeletionMode)deletionMode; + +/** + @abstract Asks the data source for a color to apply to the delete button for a given deletion mode. + @param conversationListViewController The `LYRConversationListViewController` in which the button title will appear. + @param deletionMode The `LYRDeletionMode` for which a button has to be displayed. + @return The color to apply on the delete button for a given deletion mode in the conversation list. + */ +- (UIColor *)conversationListViewController:(ATLConversationListViewController *)conversationListViewController colorForButtonWithDeletionMode:(LYRDeletionMode)deletionMode; + +/** + @abstract Asks the data source for the string to display as the conversation's last sent message. + @params conversation The conversation for which the last message text should be returned. + @return A string representing the content of the last message. If `nil` is returned the controller will fall back to default behavior. + @discussion This is used when the application uses custom `MIMEType`s and wants to customize how they are displayed. 
+ */ +- (NSString *)conversationListViewController:(ATLConversationListViewController *)conversationListViewController lastMessageTextForConversation:(LYRConversation *)conversation; + +/** + @abstract Asks the data source to configure the query used to fetch content for the controller if necessary. + @discussion The `LYRConversationListViewController` uses the following default query: + + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRConversation class]]; + query.predicate = [LYRPredicate predicateWithProperty:@"participants" predicateOperator:LYRPredicateOperatorIsIn value:self.layerClient.authenticatedUserID]; + query.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"lastMessage.receivedAt" ascending:NO]]; + + Applications that require advanced query configuration can do so by implementing this data source method. + + @param viewController The `ATLConversationViewController` requesting the configuration. + @param defaultQuery An `LYRQuery` object with the default configuration for the controller. + @return An `LYRQuery` object with any additional configuration. +@raises `NSInvalidArgumentException` if an `LYRQuery` object is not returned. + */ +- (LYRQuery *)conversationListViewController:(ATLConversationListViewController *)viewController willLoadWithQuery:(LYRQuery *)defaultQuery; + @end /** @@ -100,9 +151,9 @@ */ @interface ATLConversationListViewController : UITableViewController -///--------------------------------------- -/// @name Initializing a Controller -///--------------------------------------- +///------------------------------------------------------- +/// @name Initializing a Conversation List View Controller +///------------------------------------------------------- /** @abstract Creates and returns a new conversation list initialized with a given `LYRClient` object. 
@@ -111,6 +162,17 @@ */ + (instancetype)conversationListViewControllerWithLayerClient:(LYRClient *)layerClient; +/** + @abstract Initializes a new `ATLConversationListViewController` object with the given `LYRClient` object. + @param layerClient The `LYRClient` object from which conversations will be fetched for display. + @return An `LYRConversationListViewController` object initialized with the given `LYRClient` object. + */ +- (instancetype)initWithLayerClient:(LYRClient *)layerClient; + +///------------------------------------------------------- +/// @name Configuring Layer Client, Delegate & Data Source +///------------------------------------------------------- + /** @abstract The `LYRClient` object used to initialize the controller. @discussion If using storyboards, the property must be set explicitly. @@ -130,7 +192,7 @@ @property (nonatomic, weak) id dataSource; ///---------------------------------------- -/// @name Configuration +/// @name Configuring the Conversation List ///---------------------------------------- /** @@ -154,7 +216,7 @@ @abstract Informs the receiver if it should display an avatar item representing a conversation. @discussion When `YES`, an avatar item will be displayed for every conversation cell. Typically, this image will be an avatar image representing the user or group of users. - @default `YES` + @default `NO` @raises NSInternalInconsistencyException Raised if the value is mutated after the receiver has been presented. */ @property (nonatomic, assign) BOOL displaysAvatarItem; @@ -175,4 +237,31 @@ */ @property (nonatomic, assign) CGFloat rowHeight; +///------------- +/// @name Search +///------------- + +/** + @abstract The controller used to display search results. + */ +@property (nonatomic, readonly) UISearchDisplayController *searchController; + +/** + @abstract A boolean value that determines if the controller should show a search bar and search display controller. 
+ @discussion When `YES`, a search bar with a search display controller is shown on top of the tableview. + Should be set before the controller is presented on screen. + @default `YES`. + */ +@property (nonatomic, assign) BOOL shouldDisplaySearchController; + +///------------------------------ +/// @name Reloading Conversations +///------------------------------ + +/** + @abstract Reloads the cell for the given Conversation. + @param conversation The Conversation object to reload the corresponding cell of. Cannot be `nil`. + */ +- (void)reloadCellForConversation:(LYRConversation *)conversation; + @end diff --git a/Code/Controllers/ATLConversationListViewController.m b/Code/Controllers/ATLConversationListViewController.m index 44a3810eb..93581d4b7 100644 --- a/Code/Controllers/ATLConversationListViewController.m +++ b/Code/Controllers/ATLConversationListViewController.m @@ -20,8 +20,13 @@ #import #import "ATLConversationListViewController.h" +#import "ATLMessagingUtilities.h" static NSString *const ATLConversationCellReuseIdentifier = @"ATLConversationCellReuseIdentifier"; +static NSString *const ATLImageMIMETypePlaceholderText = @"Attachment: Image"; +static NSString *const ATLVideoMIMETypePlaceholderText = @"Attachment: Video"; +static NSString *const ATLLocationMIMETypePlaceholderText = @"Attachment: Location"; +static NSString *const ATLGIFMIMETypePlaceholderText = @"Attachment: GIF"; @interface ATLConversationListViewController () @@ -29,10 +34,14 @@ @interface ATLConversationListViewController () *conversationCell = [tableView dequeueReusableCellWithIdentifier:ATLConversationCellReuseIdentifier forIndexPath:indexPath]; + NSString *reuseIdentifier = [self reuseIdentifierForConversation:nil atIndexPath:indexPath]; + + UITableViewCell *conversationCell = [tableView dequeueReusableCellWithIdentifier:reuseIdentifier forIndexPath:indexPath]; [self configureCell:conversationCell atIndexPath:indexPath]; return conversationCell; } @@ -263,6 +291,28 @@ - 
(void)configureCell:(UITableViewCell *)conversation } else { @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Conversation View Delegate must return a conversation label" userInfo:nil]; } + + NSString *lastMessageText; + if ([self.dataSource respondsToSelector:@selector(conversationListViewController:lastMessageTextForConversation:)]) { + lastMessageText = [self.dataSource conversationListViewController:self lastMessageTextForConversation:conversation]; + } + if (!lastMessageText) { + lastMessageText = [self defaultLastMessageTextForConversation:conversation]; + } + [conversationCell updateWithLastMessageText:lastMessageText]; +} + +#pragma mark - Reloading Conversations + +- (void)reloadCellForConversation:(LYRConversation *)conversation +{ + if (!conversation) { + @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"`conversation` cannot be nil." userInfo:nil]; + } + NSIndexPath *indexPath = [self.queryController indexPathForObject:conversation]; + if (indexPath) { + [self.tableView reloadRowsAtIndexPaths:@[ indexPath ] withRowAnimation:UITableViewRowAnimationAutomatic]; + } } #pragma mark - UITableViewDelegate @@ -273,18 +323,33 @@ - (NSArray *)tableView:(UITableView *)tableView editActionsForRowAtIndexPath:(NS for (NSNumber *deletionMode in self.deletionModes) { NSString *actionString; UIColor *actionColor; - switch (deletionMode.integerValue) { - case LYRDeletionModeLocal: - actionString = @"Local"; - actionColor = [UIColor redColor]; - break; - case LYRDeletionModeAllParticipants: - actionString = @"Global"; - actionColor = [UIColor grayColor]; - break; - - default: - break; + if ([self.dataSource respondsToSelector:@selector(conversationListViewController:textForButtonWithDeletionMode:)]) { + actionString = [self.dataSource conversationListViewController:self textForButtonWithDeletionMode:deletionMode.integerValue]; + } else { + switch (deletionMode.integerValue) { + case LYRDeletionModeLocal: + 
actionString = ATLLocalizedString(@"atl.conversationlist.deletionmode.local.key", ATLConversationListViewControllerDeletionModeLocal, nil); + break; + case LYRDeletionModeAllParticipants: + actionString = ATLLocalizedString(@"atl.conversationlist.deletionmode.global.key", ATLConversationListViewControllerDeletionModeGlobal, nil); + break; + default: + break; + } + } + if ([self.dataSource respondsToSelector:@selector(conversationListViewController:colorForButtonWithDeletionMode:)]) { + actionColor = [self.dataSource conversationListViewController:self colorForButtonWithDeletionMode:deletionMode.integerValue]; + } else { + switch (deletionMode.integerValue) { + case LYRDeletionModeLocal: + actionColor = [UIColor redColor]; + break; + case LYRDeletionModeAllParticipants: + actionColor = [UIColor grayColor]; + break; + default: + break; + } } UITableViewRowAction *deleteAction = [UITableViewRowAction rowActionWithStyle:UITableViewRowActionStyleDefault title:actionString handler:^(UITableViewRowAction *action, NSIndexPath *indexPath) { [self deleteConversationAtIndexPath:indexPath withDeletionMode:deletionMode.integerValue]; @@ -298,7 +363,7 @@ - (NSArray *)tableView:(UITableView *)tableView editActionsForRowAtIndexPath:(NS - (void)tableView:(UITableView *)tableView commitEditingStyle:(UITableViewCellEditingStyle)editingStyle forRowAtIndexPath:(NSIndexPath *)indexPath { self.conversationToDelete = [self.queryController objectAtIndexPath:indexPath]; - UIActionSheet *actionSheet = [[UIActionSheet alloc] initWithTitle:nil delegate:self cancelButtonTitle:@"Cancel" destructiveButtonTitle:@"Global" otherButtonTitles:@"Local", nil]; + UIActionSheet *actionSheet = [[UIActionSheet alloc] initWithTitle:nil delegate:self cancelButtonTitle:@"Cancel" destructiveButtonTitle:ATLConversationListViewControllerDeletionModeGlobal otherButtonTitles:ATLConversationListViewControllerDeletionModeLocal, nil]; [actionSheet showInView:self.view]; } @@ -324,6 +389,20 @@ - 
(void)actionSheet:(UIActionSheet *)actionSheet clickedButtonAtIndex:(NSInteger self.conversationToDelete = nil; } +#pragma mark - Data Source + +- (NSString *)reuseIdentifierForConversation:(LYRConversation *)conversation atIndexPath:(NSIndexPath *)indexPath +{ + NSString *reuseIdentifier; + if ([self.dataSource respondsToSelector:@selector(reuseIdentifierForConversationListViewController:)]) { + reuseIdentifier = [self.dataSource reuseIdentifierForConversationListViewController:self]; + } + if (!reuseIdentifier) { + reuseIdentifier = ATLConversationCellReuseIdentifier; + } + return reuseIdentifier; +} + #pragma mark - LYRQueryControllerDelegate - (void)queryControllerWillChangeContent:(LYRQueryController *)queryController @@ -390,19 +469,26 @@ - (void)searchDisplayController:(UISearchDisplayController *)controller didLoadS - (BOOL)searchDisplayController:(UISearchDisplayController *)controller shouldReloadTableForSearchString:(NSString *)searchString { - [self.delegate conversationListViewController:self didSearchForText:searchString completion:^(NSSet *filteredParticipants) { - if (![searchString isEqualToString:controller.searchBar.text]) return; - NSSet *participantIdentifiers = [filteredParticipants valueForKey:@"participantIdentifier"]; - - LYRQuery *query = [LYRQuery queryWithClass:[LYRConversation class]]; - query.predicate = [LYRPredicate predicateWithProperty:@"participants" operator:LYRPredicateOperatorIsIn value:participantIdentifiers]; - query.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"lastMessage.receivedAt" ascending:NO]]; - self.searchQueryController = [self.layerClient queryControllerWithQuery:query]; - - NSError *error; - [self.searchQueryController execute:&error]; - [self.searchController.searchResultsTableView reloadData]; - }]; + if ([self.delegate respondsToSelector:@selector(conversationListViewController:didSearchForText:completion:)]) { + [self.delegate conversationListViewController:self didSearchForText:searchString 
completion:^(NSSet *filteredParticipants) { + if (![searchString isEqualToString:controller.searchBar.text]) return; + NSSet *participantIdentifiers = [filteredParticipants valueForKey:@"participantIdentifier"]; + + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRConversation class]]; + query.predicate = [LYRPredicate predicateWithProperty:@"participants" predicateOperator:LYRPredicateOperatorIsIn value:participantIdentifiers]; + query.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"lastMessage.receivedAt" ascending:NO]]; + + NSError *error; + self.searchQueryController = [self.layerClient queryControllerWithQuery:query error:&error]; + if (!self.queryController) { + NSLog(@"LayerKit failed to create a query controller with error: %@", error); + return; + } + + [self.searchQueryController execute:&error]; + [self.searchController.searchResultsTableView reloadData]; + }]; + } return NO; } @@ -417,6 +503,29 @@ - (LYRQueryController *)queryController #pragma mark - Helpers +- (NSString *)defaultLastMessageTextForConversation:(LYRConversation *)conversation +{ + NSString *lastMessageText; + LYRMessage *lastMessage = conversation.lastMessage; + LYRMessagePart *messagePart = lastMessage.parts[0]; + if ([messagePart.MIMEType isEqualToString:ATLMIMETypeTextPlain]) { + lastMessageText = [[NSString alloc] initWithData:messagePart.data encoding:NSUTF8StringEncoding]; + } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeImageJPEG]) { + lastMessageText = ATLLocalizedString(@"atl.conversationlist.lastMessage.text.text.key", ATLImageMIMETypePlaceholderText, nil); + } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeImagePNG]) { + lastMessageText = ATLLocalizedString(@"atl.conversationlist.lastMessage.text.png.key", ATLImageMIMETypePlaceholderText, nil); + } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeImageGIF]) { + lastMessageText = ATLLocalizedString(@"atl.conversationlist.lastMessage.text.gif.key", 
ATLGIFMIMETypePlaceholderText, nil); + } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeLocation]) { + lastMessageText = ATLLocalizedString(@"atl.conversationlist.lastMessage.text.location.key", ATLLocationMIMETypePlaceholderText, nil); + } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeVideoMP4]) { + lastMessageText = ATLLocalizedString(@"atl.conversationlist.lastMessage.text.video.key", ATLVideoMIMETypePlaceholderText, nil); + } else { + lastMessageText = ATLLocalizedString(@"atl.conversationlist.lastMessage.text.default.key", ATLImageMIMETypePlaceholderText, nil); + } + return lastMessageText; +} + - (void)deleteConversationAtIndexPath:(NSIndexPath *)indexPath withDeletionMode:(LYRDeletionMode)deletionMode { LYRConversation *conversation = [self.queryController objectAtIndexPath:indexPath]; diff --git a/Code/Controllers/ATLConversationViewController.h b/Code/Controllers/ATLConversationViewController.h index b49f1d0bb..705671000 100644 --- a/Code/Controllers/ATLConversationViewController.h +++ b/Code/Controllers/ATLConversationViewController.h @@ -24,6 +24,11 @@ #import "ATLParticipant.h" #import "ATLBaseConversationViewController.h" +typedef NS_ENUM(NSUInteger, ATLAvatarItemDisplayFrequency) { + ATLAvatarItemDisplayFrequencySection, + ATLAvatarItemDisplayFrequencyCluster, + ATLAvatarItemDisplayFrequencyAll +}; @class ATLConversationViewController; @protocol ATLMessagePresenting; @@ -68,18 +73,28 @@ */ - (CGFloat)conversationViewController:(ATLConversationViewController *)viewController heightForMessage:(LYRMessage *)message withCellWidth:(CGFloat)cellWidth; +/** + @abstract Informs the delegate of a cell being configured for the specified message. + @param viewController The `ATLConversationViewController` where the message cell will appear. + @param cell The `UICollectionViewCell` object that confirms to the `ATLMessagePresenting` protocol that will be displayed in the controller. 
+ @param message The `LYRMessage` object that will be displayed in the cell. + @discussion Applications should implement this method if they want add further configuration that is not set up during cell initialization, such as gesture recognizers. + It is up to the application to typecast the cell to access custom cell properties. + */ +- (void)conversationViewController:(ATLConversationViewController *)conversationViewController configureCell:(UICollectionViewCell *)cell forMessage:(LYRMessage *)message; + /** @abstract Asks the delegate for an `NSOrderedSet` of `LYRMessage` objects representing an `NSArray` of content parts. @param viewController The `ATLConversationViewController` supplying the content parts. @param mediaAttachments The array of `ATLMediaAttachment` items supplied via user input into the `messageInputToolbar` property of the controller. @return An `NSOrderedSet` of `LYRMessage` objects. If `nil` is returned, the controller will fall back to default behavior. If an empty `NSOrderedSet` is returned, the controller will not send any messages. - @discussion Called when a user taps the `SEND` button on an `ATLMessageInputToolbar`. The media attachments array supplied can contain - any media type, such as text, images, GPS location information. Applications who wish to send `LYRMessage` objects with custom `LYRMessagePart` - MIME types not supported by default by Atlas can do so by implementing this method. All `LYRMessage` objects returned will be immediately - sent into the current conversation for the controller. If implemented, applications should also register custom `UICollectionViewCell` classes - with the controller via a call to `registerClass:forMessageCellWithReuseIdentifier:`. They should also implement the optional data source method, - `conversationViewController:reuseIdentifierForMessage:`. + @discussion Called when a user taps the `rightAccessoryButton` on an `ATLMessageInputToolbar`. 
The media attachments array supplied can contain + any media type, such as text, images, GPS location information. The media attachment array can also be empty, which indicates the `rightAccessoryButton` + was tapped when it contained no content, ie. the location share. Applications who wish to send `LYRMessage` objects with custom `LYRMessagePart` MIME + types not supported by default by Atlas can do so by implementing this method. All `LYRMessage` objects returned will be immediately sent into the + current conversation for the controller. If implemented, applications should also register custom `UICollectionViewCell` classes with the controller via + a call to `registerClass:forMessageCellWithReuseIdentifier:`. They should also implement the optional data source method, `conversationViewController:reuseIdentifierForMessage:`. */ - (NSOrderedSet *)conversationViewController:(ATLConversationViewController *)viewController messagesForMediaAttachments:(NSArray *)mediaAttachments; @@ -142,6 +157,23 @@ */ - (LYRConversation *)conversationViewController:(ATLConversationViewController *)viewController conversationWithParticipants:(NSSet *)participants; +/** + @abstract Asks the data source to configure the default query used to fetch content for the controller if necessary. + @discussion The `LYRConversationViewController` uses the following default query: + + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRMessage class]]; + query.predicate = [LYRPredicate predicateWithProperty:@"conversation" predicateOperator:LYRPredicateOperatorIsEqualTo value:self.conversation]; + query.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"position" ascending:YES]]; + + Applications that require advanced query configuration can do so by implementing this data source method. + + @param viewController The `ATLConversationViewController` requesting the configuration. + @param defaultQuery An `LYRQuery` object with the default configuration for the controller. 
+ @return An `LYRQuery` object with any additional configuration. + @raises `NSInvalidArgumentException` if an `LYRQuery` object is not returned. + */ +- (LYRQuery *)conversationViewController:(ATLConversationViewController *)viewController willLoadWithQuery:(LYRQuery *)defaultQuery; + @end /** @@ -149,7 +181,7 @@ a Layer conversation and the ability to send messages. The controller's design and functionality closely correlates with the conversation view controller in Messages. */ -@interface ATLConversationViewController : ATLBaseConversationViewController +@interface ATLConversationViewController : ATLBaseConversationViewController ///--------------------------------------- /// @name Initializing a Controller @@ -162,6 +194,13 @@ */ + (instancetype)conversationViewControllerWithLayerClient:(LYRClient *)layerClient; +/** + @abstract Initializes a new `ATLConversationViewController` object with the given `LYRClient` object. + @param layerClient The `LYRClient` object from which to retrieve the messages for display. + @return An `LYRConversationViewController` object initialized with the given `LYRClient` object. + */ +- (instancetype)initWithLayerClient:(LYRClient *)layerClient; + /** @abstract The `LYRClient` object used to initialize the controller. @discussion If using storyboards, the property must be set explicitly. @@ -174,6 +213,11 @@ */ @property (nonatomic) LYRConversation *conversation; +/** + @abstract The `LYRQueryController` object managing data displayed in the controller. + */ +@property (nonatomic, readonly) LYRQueryController *queryController; + /** @abstract The `ATLConversationViewControllerDelegate` class informs the receiver to specific events that occurred within the controller. */ @@ -192,13 +236,17 @@ - (void)registerClass:(Class)cellClass forMessageCellWithReuseIdentifier:(NSString *)reuseIdentifier; /** - @abstract Returns the `UICollectionViewCell` corresponding to the provided `LYRMessage` object. 
- @param message The LYRMessage object used to acquire the cell. - @return A `UICollectionViewCell` object conforming to the `ATLMessagePresenting` protocol. - @discussion If the provided `LYRMessage` object is not in the current results set of the controller, or the corresponding cell is - not currently visible, the method may return nil. + @abstract Reloads the cell for the given Message. + @param message The Message object to reload the corresponding cell of. Cannot be `nil`. */ -- (UICollectionViewCell *)collectionViewCellForMessage:(LYRMessage *)message; +- (void)reloadCellForMessage:(LYRMessage *)message; + +/** + @abstract Reloads the cells for all messages sent by the participant with the given identifier. + @discussion This method is useful after the completion of asynchronous user resolution activities. + @param participantIdentifier The identifier of the participant whose messages are to be reloaded. + */ +- (void)reloadCellsForMessagesSentByParticipantWithIdentifier:(NSString *)participantIdentifier; /** @abstract Informs the reciever that it should send a message with the current location of the device. @@ -206,9 +254,9 @@ */ - (void)sendLocationMessage; -///--------------------------------------- -/// @name Configuration -///--------------------------------------- +///--------------------------- +/// @name Configuring Behavior +///--------------------------- /** @abstract The time interval at which message dates should be displayed in seconds. Default is 60 minutes meaning that @@ -217,10 +265,29 @@ @property (nonatomic) NSTimeInterval dateDisplayTimeInterval; /** -@abstract A Boolean value that determines whether or not the controller marks all messages as read. + @abstract A Boolean value that determines whether or not the controller marks all messages as read. @discussion If `YES`, the controller will mark all messages in the conversation as read when it is presented. @default `YES`. 
*/ @property (nonatomic) BOOL marksMessagesAsRead; +/** + @abstract A Boolean value that determines whether or not an avatar is shown if there is only one other participant in the conversation. + @default `NO`. + Should be set before `[super viewDidLoad]` is called. + */ +@property (nonatomic) BOOL shouldDisplayAvatarItemForOneOtherParticipant; + +/** + @abstract A Boolean value that determines whether or not an avatar is shown next to the outgoing messages + @default `NO`. + */ +@property (nonatomic) BOOL shouldDisplayAvatarItemForAuthenticatedUser; + +/** + @abstract An Enum value that determines how often avatar items should be shown next to messages. + @default 'ATLAvatarItemDisplayFrequencySection'. + */ +@property (nonatomic) ATLAvatarItemDisplayFrequency avatarItemDisplayFrequency; + @end diff --git a/Code/Controllers/ATLConversationViewController.m b/Code/Controllers/ATLConversationViewController.m index 4bf97d27e..d2527e2b3 100644 --- a/Code/Controllers/ATLConversationViewController.m +++ b/Code/Controllers/ATLConversationViewController.m @@ -20,6 +20,7 @@ #import #import +#import #import "ATLConversationViewController.h" #import "ATLConversationCollectionView.h" #import "ATLConstants.h" @@ -29,10 +30,12 @@ #import "ATLConversationDataSource.h" #import "ATLMediaAttachment.h" #import "ATLLocationManager.h" +@import AVFoundation; -@interface ATLConversationViewController () +@interface ATLConversationViewController () @property (nonatomic) ATLConversationDataSource *conversationDataSource; +@property (nonatomic, readwrite) LYRQueryController *queryController; @property (nonatomic) BOOL shouldDisplayAvatarItem; @property (nonatomic) NSMutableOrderedSet *typingParticipantIDs; @property (nonatomic) NSMutableArray *objectChanges; @@ -45,6 +48,7 @@ @interface ATLConversationViewController () 0) { + [self cacheMediaAttachments]; + } self.collectionView.delegate = nil; [[NSNotificationCenter defaultCenter] removeObserver:self]; } +- (void)cacheMediaAttachments +{ + 
[[[self class] sharedMediaAttachmentCache] setObject:self.messageInputToolbar.mediaAttachments forKey:self.conversation.identifier]; +} + +- (void)loadCachedMediaAttachments +{ + NSArray *mediaAttachments = [[[self class] sharedMediaAttachmentCache] objectForKey:self.conversation.identifier]; + for (int i = 0; i < mediaAttachments.count; i++) { + ATLMediaAttachment *attachment = [mediaAttachments objectAtIndex:i]; + BOOL shouldHaveLineBreak = (i < mediaAttachments.count - 1) || !(attachment.mediaMIMEType == ATLMIMETypeTextPlain); + [self.messageInputToolbar insertMediaAttachment:attachment withEndLineBreak:shouldHaveLineBreak]; + } + [[[self class] sharedMediaAttachmentCache] removeObjectForKey:self.conversation.identifier]; +} + #pragma mark - Conversation Data Source Setup - (void)setConversation:(LYRConversation *)conversation @@ -184,8 +233,24 @@ - (void)setConversation:(LYRConversation *)conversation - (void)fetchLayerMessages { if (!self.conversation) return; - self.conversationDataSource = [ATLConversationDataSource dataSourceWithLayerClient:self.layerClient conversation:self.conversation]; + + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRMessage class]]; + query.predicate = [LYRPredicate predicateWithProperty:@"conversation" predicateOperator:LYRPredicateOperatorIsEqualTo value:self.conversation]; + query.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"position" ascending:YES]]; + + if ([self.dataSource respondsToSelector:@selector(conversationViewController:willLoadWithQuery:)]) { + query = [self.dataSource conversationViewController:self willLoadWithQuery:query]; + if (![query isKindOfClass:[LYRQuery class]]){ + @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"Data source must return an `LYRQuery` object." 
userInfo:nil]; + } + } + + self.conversationDataSource = [ATLConversationDataSource dataSourceWithLayerClient:self.layerClient query:query]; + if (!self.conversationDataSource) { + return; + } self.conversationDataSource.queryController.delegate = self; + self.queryController = self.conversationDataSource.queryController; self.showingMoreMessagesIndicator = [self.conversationDataSource moreMessagesAvailable]; [self.collectionView reloadData]; } @@ -197,7 +262,7 @@ - (void)configureControllerForConversation // Configure avatar image display NSMutableSet *otherParticipantIDs = [self.conversation.participants mutableCopy]; if (self.layerClient.authenticatedUserID) [otherParticipantIDs removeObject:self.layerClient.authenticatedUserID]; - self.shouldDisplayAvatarItem = otherParticipantIDs.count > 1; + self.shouldDisplayAvatarItem = (otherParticipantIDs.count > 1) ? YES : self.shouldDisplayAvatarItemForOneOtherParticipant; // Configure message bar button enablement BOOL shouldEnableButton = self.conversation ? YES : NO; @@ -205,7 +270,7 @@ - (void)configureControllerForConversation self.messageInputToolbar.leftAccessoryButton.enabled = shouldEnableButton; // Mark all messages as read if needed - if (self.conversation.lastMessage) { + if (self.conversation.lastMessage && self.marksMessagesAsRead) { [self.conversation markAllMessagesAsRead:nil]; } } @@ -230,11 +295,10 @@ - (void)configureAddressBarForConversation - (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section { if (section == ATLMoreMessagesSection) return 0; - // Each message is represented by one cell no matter how many parts it has. return 1; } - + /** Atlas - The `ATLConversationViewController` component uses `LYRMessage` objects to represent sections. 
*/ @@ -253,6 +317,9 @@ - (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cell UICollectionViewCell *cell = [self.collectionView dequeueReusableCellWithReuseIdentifier:reuseIdentifier forIndexPath:indexPath]; [self configureCell:cell forMessage:message indexPath:indexPath]; + if ([self.delegate respondsToSelector:@selector(conversationViewController:configureCell:forMessage:)]) { + [self.delegate conversationViewController:self configureCell:cell forMessage:message]; + } return cell; } @@ -350,14 +417,15 @@ - (void)scrollViewDidScrollToTop:(UIScrollView *)scrollView - (void)configureCell:(UICollectionViewCell *)cell forMessage:(LYRMessage *)message indexPath:(NSIndexPath *)indexPath { [cell presentMessage:message]; - [cell shouldDisplayAvatarItem:self.shouldDisplayAvatarItem]; + BOOL willDisplayAvatarItem = (![message.sender.userID isEqualToString:self.layerClient.authenticatedUserID]) ? self.shouldDisplayAvatarItem : (self.shouldDisplayAvatarItem && self.shouldDisplayAvatarItemForAuthenticatedUser); + [cell shouldDisplayAvatarItem:willDisplayAvatarItem]; if ([self shouldDisplayAvatarItemAtIndexPath:indexPath]) { - [cell updateWithSender:[self participantForIdentifier:message.sentByUserID]]; + [cell updateWithSender:[self participantForIdentifier:message.sender.userID]]; } else { [cell updateWithSender:nil]; } - if (message.isUnread) { + if (message.isUnread && [[UIApplication sharedApplication] applicationState] == UIApplicationStateActive && self.marksMessagesAsRead) { [message markAsRead:nil]; } } @@ -390,7 +458,11 @@ - (void)configureHeader:(ATLConversationCollectionViewHeader *)header atIndexPat - (CGFloat)defaultCellHeightForItemAtIndexPath:(NSIndexPath *)indexPath { LYRMessage *message = [self.conversationDataSource messageAtCollectionViewIndexPath:indexPath]; - return [ATLMessageCollectionViewCell cellHeightForMessage:message inView:self.view]; + if ([message.sender.userID isEqualToString:self.layerClient.authenticatedUserID]) { + 
return [ATLOutgoingMessageCollectionViewCell cellHeightForMessage:message inView:self.view]; + } else { + return [ATLIncomingMessageCollectionViewCell cellHeightForMessage:message inView:self.view]; + } } - (BOOL)shouldDisplayDateLabelForSection:(NSUInteger)section @@ -415,11 +487,10 @@ - (BOOL)shouldDisplaySenderLabelForSection:(NSUInteger)section if (self.conversation.participants.count <= 2) return NO; LYRMessage *message = [self.conversationDataSource messageAtCollectionViewSection:section]; - if ([message.sentByUserID isEqualToString:self.layerClient.authenticatedUserID]) return NO; - + if ([message.sender.userID isEqualToString:self.layerClient.authenticatedUserID]) return NO; if (section > ATLNumberOfSectionsBeforeFirstMessageSection) { LYRMessage *previousMessage = [self.conversationDataSource messageAtCollectionViewSection:section - 1]; - if ([previousMessage.sentByUserID isEqualToString:message.sentByUserID]) { + if ([previousMessage.sender.userID isEqualToString:message.sender.userID]) { return NO; } } @@ -432,9 +503,8 @@ - (BOOL)shouldDisplayReadReceiptForSection:(NSUInteger)section NSInteger lastQueryControllerRow = [self.conversationDataSource.queryController numberOfObjectsInSection:0] - 1; NSInteger lastSection = [self.conversationDataSource collectionViewSectionForQueryControllerRow:lastQueryControllerRow]; if (section != lastSection) return NO; - LYRMessage *message = [self.conversationDataSource messageAtCollectionViewSection:section]; - if (![message.sentByUserID isEqualToString:self.layerClient.authenticatedUserID]) return NO; + if (![message.sender.userID isEqualToString:self.layerClient.authenticatedUserID]) return NO; return YES; } @@ -457,18 +527,23 @@ - (BOOL)shouldClusterMessageAtSection:(NSUInteger)section - (BOOL)shouldDisplayAvatarItemAtIndexPath:(NSIndexPath *)indexPath { if (!self.shouldDisplayAvatarItem) return NO; - LYRMessage *message = [self.conversationDataSource messageAtCollectionViewIndexPath:indexPath]; - if 
([message.sentByUserID isEqualToString:self.layerClient.authenticatedUserID]) { + if (message.sender.userID == nil) { + return NO; + } + + if ([message.sender.userID isEqualToString:self.layerClient.authenticatedUserID] && !self.shouldDisplayAvatarItemForAuthenticatedUser) { return NO; } - + if (![self shouldClusterMessageAtSection:indexPath.section] && self.avatarItemDisplayFrequency == ATLAvatarItemDisplayFrequencyCluster) { + return YES; + } NSInteger lastQueryControllerRow = [self.conversationDataSource.queryController numberOfObjectsInSection:0] - 1; NSInteger lastSection = [self.conversationDataSource collectionViewSectionForQueryControllerRow:lastQueryControllerRow]; if (indexPath.section < lastSection) { LYRMessage *nextMessage = [self.conversationDataSource messageAtCollectionViewSection:indexPath.section + 1]; // If the next message is sent by the same user, no - if ([nextMessage.sentByUserID isEqualToString:message.sentByUserID]) { + if ([nextMessage.sender.userID isEqualToString:message.sender.userID] && self.avatarItemDisplayFrequency != ATLAvatarItemDisplayFrequencyAll) { return NO; } } @@ -479,12 +554,17 @@ - (BOOL)shouldDisplayAvatarItemAtIndexPath:(NSIndexPath *)indexPath - (void)messageInputToolbar:(ATLMessageInputToolbar *)messageInputToolbar didTapLeftAccessoryButton:(UIButton *)leftAccessoryButton { + if (messageInputToolbar.textInputView.isFirstResponder) { + [messageInputToolbar.textInputView resignFirstResponder]; + } + UIActionSheet *actionSheet = [[UIActionSheet alloc] initWithTitle:nil delegate:self - cancelButtonTitle:@"Cancel" + cancelButtonTitle:ATLLocalizedString(@"atl.conversation.toolbar.actionsheet.cancel.key", @"Cancel", nil) destructiveButtonTitle:nil - otherButtonTitles:@"Take Photo", @"Last Photo Taken", @"Photo Library", nil]; + otherButtonTitles:ATLLocalizedString(@"atl.conversation.toolbar.actionsheet.takephoto.key", @"Take Photo/Video", nil), ATLLocalizedString(@"atl.conversation.toolbar.actionsheet.lastphoto.key", @"Last 
Photo/Video", nil), ATLLocalizedString(@"atl.conversation.toolbar.actionsheet.library.key", @"Photo/Video Library", nil), nil]; [actionSheet showInView:self.view]; + actionSheet.tag = ATLPhotoActionSheet; } - (void)messageInputToolbar:(ATLMessageInputToolbar *)messageInputToolbar didTapRightAccessoryButton:(UIButton *)rightAccessoryButton @@ -492,13 +572,15 @@ - (void)messageInputToolbar:(ATLMessageInputToolbar *)messageInputToolbar didTap if (!self.conversation) { return; } - if (messageInputToolbar.mediaAttachments.count) { - NSOrderedSet *messages = [self messagesForMediaAttachments:messageInputToolbar.mediaAttachments]; + + // If there's no content in the input field, send the location. + NSOrderedSet *messages = [self messagesForMediaAttachments:messageInputToolbar.mediaAttachments]; + if (messages.count == 0 && messageInputToolbar.textInputView.text.length == 0) { + [self sendLocationMessage]; + } else { for (LYRMessage *message in messages) { [self sendMessage:message]; } - } else { - [self sendLocationMessage]; } if (self.addressBarController) [self.addressBarController disable]; } @@ -522,16 +604,33 @@ - (NSOrderedSet *)defaultMessagesForMediaAttachments:(NSArray *)mediaAttachments NSMutableOrderedSet *messages = [NSMutableOrderedSet new]; for (ATLMediaAttachment *attachment in mediaAttachments){ NSArray *messageParts = ATLMessagePartsWithMediaAttachment(attachment); - LYRMessage *message = [self messageForMessageParts:messageParts pushText:attachment.textRepresentation]; + LYRMessage *message = [self messageForMessageParts:messageParts MIMEType:attachment.mediaMIMEType pushText:(([attachment.mediaMIMEType isEqualToString:ATLMIMETypeTextPlain]) ? 
attachment.textRepresentation : nil)]; if (message)[messages addObject:message]; } return messages; } -- (LYRMessage *)messageForMessageParts:(NSArray *)parts pushText:(NSString *)pushText; +- (LYRMessage *)messageForMessageParts:(NSArray *)parts MIMEType:(NSString *)MIMEType pushText:(NSString *)pushText; { NSString *senderName = [[self participantForIdentifier:self.layerClient.authenticatedUserID] fullName]; - NSDictionary *pushOptions = @{LYRMessageOptionsPushNotificationAlertKey : [NSString stringWithFormat:@"%@: %@", senderName, pushText], + NSString *completePushText; + if (!pushText) { + if ([MIMEType isEqualToString:ATLMIMETypeImageGIF]) { + completePushText = [NSString stringWithFormat:@"%@ %@", senderName, ATLDefaultPushAlertGIF]; + } else if ([MIMEType isEqualToString:ATLMIMETypeImagePNG] || [MIMEType isEqualToString:ATLMIMETypeImageJPEG]) { + completePushText = [NSString stringWithFormat:@"%@ %@", senderName, ATLDefaultPushAlertImage]; + } else if ([MIMEType isEqualToString:ATLMIMETypeLocation]) { + completePushText = [NSString stringWithFormat:@"%@ %@", senderName, ATLDefaultPushAlertLocation]; + } else if ([MIMEType isEqualToString:ATLMIMETypeVideoMP4]){ + completePushText = [NSString stringWithFormat:@"%@ %@", senderName, ATLDefaultPushAlertVideo]; + } else { + completePushText = [NSString stringWithFormat:@"%@ %@", senderName, ATLDefaultPushAlertText]; + } + } else { + completePushText = [NSString stringWithFormat:@"%@: %@", senderName, pushText]; + } + + NSDictionary *pushOptions = @{LYRMessageOptionsPushNotificationAlertKey : completePushText, LYRMessageOptionsPushNotificationSoundNameKey : ATLPushNotificationSoundName}; NSError *error; LYRMessage *message = [self.layerClient newMessageWithParts:parts options:pushOptions error:&error]; @@ -552,7 +651,7 @@ - (void)sendMessage:(LYRMessage *)message } } -#pragma mark - Location Message +#pragma mark - Location Message - (void)sendLocationMessage { @@ -582,7 +681,7 @@ - 
(void)locationManager:(CLLocationManager *)manager didUpdateLocations:(NSArray - (void)sendMessageWithLocation:(CLLocation *)location { ATLMediaAttachment *attachement = [ATLMediaAttachment mediaAttachmentWithLocation:location]; - LYRMessage *message = [self messageForMessageParts:ATLMessagePartsWithMediaAttachment(attachement) pushText:@"Attachement: Location"]; + LYRMessage *message = [self messageForMessageParts:ATLMessagePartsWithMediaAttachment(attachement) MIMEType:ATLMIMETypeLocation pushText:nil]; [self sendMessage:message]; } @@ -590,21 +689,23 @@ - (void)sendMessageWithLocation:(CLLocation *)location - (void)actionSheet:(UIActionSheet *)actionSheet didDismissWithButtonIndex:(NSInteger)buttonIndex { - switch (buttonIndex) { - case 0: - [self displayImagePickerWithSourceType:UIImagePickerControllerSourceTypeCamera]; - break; - - case 1: - [self captureLastPhotoTaken]; - break; - - case 2: - [self displayImagePickerWithSourceType:UIImagePickerControllerSourceTypePhotoLibrary]; - break; - - default: - break; + if (actionSheet.tag == ATLPhotoActionSheet) { + switch (buttonIndex) { + case 0: + [self displayImagePickerWithSourceType:UIImagePickerControllerSourceTypeCamera]; + break; + + case 1: + [self captureLastPhotoTaken]; + break; + + case 2: + [self displayImagePickerWithSourceType:UIImagePickerControllerSourceTypePhotoLibrary]; + break; + + default: + break; + } } } @@ -617,7 +718,9 @@ - (void)displayImagePickerWithSourceType:(UIImagePickerControllerSourceType)sour if (pickerSourceTypeAvailable) { UIImagePickerController *picker = [[UIImagePickerController alloc] init]; picker.delegate = self; + picker.mediaTypes = [UIImagePickerController availableMediaTypesForSourceType:sourceType]; picker.sourceType = sourceType; + picker.videoQuality = UIImagePickerControllerQualityTypeHigh; [self.navigationController presentViewController:picker animated:YES completion:nil]; } } @@ -629,7 +732,7 @@ - (void)captureLastPhotoTaken NSLog(@"Failed to capture last photo 
with error: %@", [error localizedDescription]); } else { ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithAssetURL:assetURL thumbnailSize:ATLDefaultThumbnailSize]; - [self.messageInputToolbar insertMediaAttachment:mediaAttachment]; + [self.messageInputToolbar insertMediaAttachment:mediaAttachment withEndLineBreak:YES]; } }); } @@ -638,24 +741,28 @@ - (void)captureLastPhotoTaken - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info { - NSString *mediaType = info[UIImagePickerControllerMediaType]; - if ([mediaType isEqualToString:(__bridge NSString *)kUTTypeImage]) { - NSURL *assetURL = (NSURL *)info[UIImagePickerControllerReferenceURL]; - ATLMediaAttachment *mediaAttachment; - if (assetURL) { - mediaAttachment = [ATLMediaAttachment mediaAttachmentWithAssetURL:assetURL thumbnailSize:ATLDefaultThumbnailSize]; - } else if (info[UIImagePickerControllerOriginalImage]) { - mediaAttachment = [ATLMediaAttachment mediaAttachmentWithImage:info[UIImagePickerControllerOriginalImage] - metadata:info[UIImagePickerControllerMediaMetadata] - thumbnailSize:ATLDefaultThumbnailSize]; - } else { - return; - } - [self.messageInputToolbar insertMediaAttachment:mediaAttachment]; + ATLMediaAttachment *mediaAttachment; + if (info[UIImagePickerControllerMediaURL]) { + // Video recorded within the app or was picked and edited in + // the image picker. + NSURL *moviePath = [NSURL fileURLWithPath:(NSString *)[[info objectForKey:UIImagePickerControllerMediaURL] path]]; + mediaAttachment = [ATLMediaAttachment mediaAttachmentWithFileURL:moviePath thumbnailSize:ATLDefaultThumbnailSize]; + } else if (info[UIImagePickerControllerReferenceURL]) { + // Photo taken or video recorded within the app. 
+ mediaAttachment = [ATLMediaAttachment mediaAttachmentWithAssetURL:info[UIImagePickerControllerReferenceURL] thumbnailSize:ATLDefaultThumbnailSize]; + } else if (info[UIImagePickerControllerOriginalImage]) { + // Image picked from the image picker. + mediaAttachment = [ATLMediaAttachment mediaAttachmentWithImage:info[UIImagePickerControllerOriginalImage] metadata:info[UIImagePickerControllerMediaMetadata] thumbnailSize:ATLDefaultThumbnailSize]; + } else { + return; + } + + if (mediaAttachment) { + [self.messageInputToolbar insertMediaAttachment:mediaAttachment withEndLineBreak:YES]; } [self.navigationController dismissViewControllerAnimated:YES completion:nil]; [self.view becomeFirstResponder]; - + // Workaround for collection view not displayed on iOS 7.1. [self.collectionView setNeedsLayout]; } @@ -664,7 +771,7 @@ - (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker { [self.navigationController dismissViewControllerAnimated:YES completion:nil]; [self.view becomeFirstResponder]; - + // Workaround for collection view not displayed on iOS 7.1. 
[self.collectionView setNeedsLayout]; } @@ -696,14 +803,11 @@ - (void)layerClientObjectsDidChange:(NSNotification *)notification if (![notification.object isEqual:self.layerClient]) return; NSArray *changes = notification.userInfo[LYRClientObjectChangesUserInfoKey]; - for (NSDictionary *change in changes) { - id changedObject = change[LYRObjectChangeObjectKey]; - if (![changedObject isEqual:self.conversation]) continue; - - LYRObjectChangeType changeType = [change[LYRObjectChangeTypeKey] integerValue]; - NSString *changedProperty = change[LYRObjectChangePropertyKey]; - - if (changeType == LYRObjectChangeTypeUpdate && [changedProperty isEqualToString:@"participants"]) { + for (LYRObjectChange *change in changes) { + if (![change.object isEqual:self.conversation]) { + continue; + } + if (change.type == LYRObjectChangeTypeUpdate && [change.property isEqualToString:@"participants"]) { [self configureControllerForChangedParticipants]; break; } @@ -712,7 +816,7 @@ - (void)layerClientObjectsDidChange:(NSNotification *)notification - (void)handleApplicationWillEnterForeground:(NSNotification *)notification { - if (self.conversation) { + if (self.conversation && self.marksMessagesAsRead) { NSError *error; BOOL success = [self.conversation markAllMessagesAsRead:&error]; if (!success) { @@ -747,10 +851,12 @@ - (void)configureControllerForChangedParticipants [self configureConversationForAddressBar]; return; } - NSMutableSet *removedParticipantIdentifiers = [self.typingParticipantIDs copy]; - [removedParticipantIdentifiers minusSet:self.conversation.participants]; - [self.typingParticipantIDs removeObjectsInArray:removedParticipantIdentifiers.allObjects]; - [self updateTypingIndicatorOverlay:NO]; + NSMutableSet *removedParticipantIdentifiers = [NSMutableSet setWithArray:[self.typingParticipantIDs array]]; + if (removedParticipantIdentifiers.count) { + [removedParticipantIdentifiers minusSet:self.conversation.participants]; + [self.typingParticipantIDs 
removeObjectsInArray:removedParticipantIdentifiers.allObjects]; + [self updateTypingIndicatorOverlay:NO]; + } [self configureAddressBarForChangedParticipants]; [self configureControllerForConversation]; [self.collectionView reloadData]; @@ -794,13 +900,13 @@ - (void)configurePaginationWindow if (CGRectEqualToRect(self.collectionView.frame, CGRectZero)) return; if (self.collectionView.isDragging) return; if (self.collectionView.isDecelerating) return; - + CGFloat topOffset = -self.collectionView.contentInset.top; CGFloat distanceFromTop = self.collectionView.contentOffset.y - topOffset; CGFloat minimumDistanceFromTopToTriggerLoadingMore = 200; BOOL nearTop = distanceFromTop <= minimumDistanceFromTopToTriggerLoadingMore; if (!nearTop) return; - + [self.conversationDataSource expandPaginationWindow]; } @@ -849,26 +955,26 @@ - (void)configureConversationForAddressBar - (void)configureAddressBarForChangedParticipants { if (!self.addressBarController) return; - + NSOrderedSet *existingParticipants = self.addressBarController.selectedParticipants; NSOrderedSet *existingParticipantIdentifiers = [existingParticipants valueForKey:@"participantIdentifier"]; - + if (!existingParticipantIdentifiers && !self.conversation.participants) return; if ([existingParticipantIdentifiers.set isEqual:self.conversation.participants]) return; - + NSMutableOrderedSet *removedIdentifiers = [NSMutableOrderedSet orderedSetWithOrderedSet:existingParticipantIdentifiers]; [removedIdentifiers minusSet:self.conversation.participants]; - + NSMutableOrderedSet *addedIdentifiers = [NSMutableOrderedSet orderedSetWithSet:self.conversation.participants]; [addedIdentifiers minusOrderedSet:existingParticipantIdentifiers]; NSString *authenticatedUserID = self.layerClient.authenticatedUserID; if (authenticatedUserID) [addedIdentifiers removeObject:authenticatedUserID]; - + NSMutableOrderedSet *participantIdentifiers = [NSMutableOrderedSet orderedSetWithOrderedSet:existingParticipantIdentifiers]; 
[participantIdentifiers minusOrderedSet:removedIdentifiers]; [participantIdentifiers unionOrderedSet:addedIdentifiers]; - + NSOrderedSet *participants = [self participantsForIdentifiers:participantIdentifiers]; self.addressBarController.selectedParticipants = participants; } @@ -880,15 +986,58 @@ - (void)registerClass:(Class)cellClass forMessageCellWithR [self.collectionView registerClass:cellClass forCellWithReuseIdentifier:reuseIdentifier]; } -- (UICollectionViewCell *)collectionViewCellForMessage:(LYRMessage *)message +- (void)reloadCellForMessage:(LYRMessage *)message { - NSIndexPath *indexPath = [self.conversationDataSource.queryController indexPathForObject:message]; - if (indexPath) { - NSIndexPath *collectionViewIndexPath = [self.conversationDataSource collectionViewIndexPathForQueryControllerIndexPath:indexPath]; - UICollectionViewCell *cell = [self.collectionView cellForItemAtIndexPath:collectionViewIndexPath]; - if (cell) return (UICollectionViewCell *)cell; - } - return nil; + dispatch_async(self.animationQueue, ^{ + NSIndexPath *indexPath = [self.conversationDataSource.queryController indexPathForObject:message]; + if (indexPath) { + NSIndexPath *collectionViewIndexPath = [self.conversationDataSource collectionViewIndexPathForQueryControllerIndexPath:indexPath]; + if (collectionViewIndexPath) { + // Configure the cell, the header, and the footer + dispatch_async(dispatch_get_main_queue(), ^{ + [self configureCollectionViewElementsAtCollectionViewIndexPath:collectionViewIndexPath]; + }); + } + } + }); +} + +- (void)reloadCellsForMessagesSentByParticipantWithIdentifier:(NSString *)participantIdentifier +{ + dispatch_async(self.animationQueue, ^{ + // Query for all of the message identifiers in the conversation + LYRQuery *messageIdentifiersQuery = [self.conversationDataSource.queryController.query copy]; + messageIdentifiersQuery.resultType = LYRQueryResultTypeIdentifiers; + NSError *error = nil; + NSOrderedSet *messageIdentifiers = [self.layerClient 
executeQuery:messageIdentifiersQuery error:&error]; + if (!messageIdentifiers) { + NSLog(@"LayerKit failed to execute query with error: %@", error); + return; + } + + // Query for the all of the message identifiers in the above set where user == participantIdentifier + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRMessage class]]; + LYRPredicate *senderPredicate = [LYRPredicate predicateWithProperty:@"sender.userID" predicateOperator:LYRPredicateOperatorIsEqualTo value:participantIdentifier]; + LYRPredicate *objectIdentifiersPredicate = [LYRPredicate predicateWithProperty:@"identifier" predicateOperator:LYRPredicateOperatorIsIn value:messageIdentifiers]; + query.predicate = [LYRCompoundPredicate compoundPredicateWithType:LYRCompoundPredicateTypeAnd subpredicates:@[ senderPredicate, objectIdentifiersPredicate ]]; + query.resultType = LYRQueryResultTypeIdentifiers; + NSOrderedSet *messageIdentifiersToReload = [self.layerClient executeQuery:query error:&error]; + if (!messageIdentifiers) { + NSLog(@"LayerKit failed to execute query with error: %@", error); + return; + } + + // Convert query controller index paths to collection view index paths + NSDictionary *objectIdentifiersToIndexPaths = [self.conversationDataSource.queryController indexPathsForObjectsWithIdentifiers:messageIdentifiersToReload.set]; + NSArray *queryControllerIndexPaths = [objectIdentifiersToIndexPaths allValues]; + for (NSIndexPath *indexPath in queryControllerIndexPaths) { + NSIndexPath *collectionViewIndexPath = [self.conversationDataSource collectionViewIndexPathForQueryControllerIndexPath:indexPath]; + // Configure the cell, the header, and the footer + dispatch_async(dispatch_get_main_queue(), ^{ + [self configureCollectionViewElementsAtCollectionViewIndexPath:collectionViewIndexPath]; + }); + } + }); } #pragma mark - Delegate @@ -984,7 +1133,7 @@ - (NSString *)reuseIdentifierForMessage:(LYRMessage *)message atIndexPath:(NSInd reuseIdentifier = [self.dataSource 
conversationViewController:self reuseIdentifierForMessage:message]; } if (!reuseIdentifier) { - if ([self.layerClient.authenticatedUserID isEqualToString:message.sentByUserID]) { + if ([self.layerClient.authenticatedUserID isEqualToString:message.sender.userID]) { reuseIdentifier = ATLOutgoingMessageCellIdentifier; } else { reuseIdentifier = ATLIncomingMessageCellIdentifier; @@ -1026,15 +1175,23 @@ - (void)queryController:(LYRQueryController *)controller [self.objectChanges addObject:[ATLDataSourceChange changeObjectWithType:type newIndex:newIndex currentIndex:currentIndex]]; } +- (void)queryControllerWillChangeContent:(LYRQueryController *)queryController +{ + // Implemented by subclass +} + - (void)queryControllerDidChangeContent:(LYRQueryController *)queryController { + NSArray *objectChanges = [self.objectChanges copy]; + [self.objectChanges removeAllObjects]; + if (self.conversationDataSource.isExpandingPaginationWindow) { self.showingMoreMessagesIndicator = [self.conversationDataSource moreMessagesAvailable]; [self reloadCollectionViewAdjustingForContentHeightChange]; return; } - if (self.objectChanges.count == 0) { + if (objectChanges.count == 0) { [self configurePaginationWindow]; [self configureMoreMessagesIndicatorVisibility]; return; @@ -1042,32 +1199,37 @@ - (void)queryControllerDidChangeContent:(LYRQueryController *)queryController // Prevent scrolling if user has scrolled up into the conversation history. 
BOOL shouldScrollToBottom = [self shouldScrollToBottom]; - [self.collectionView performBatchUpdates:^{ - for (ATLDataSourceChange *change in self.objectChanges) { - switch (change.type) { - case LYRQueryControllerChangeTypeInsert: - [self.collectionView insertSections:[NSIndexSet indexSetWithIndex:change.newIndex]]; - break; - - case LYRQueryControllerChangeTypeMove: - [self.collectionView moveSection:change.currentIndex toSection:change.newIndex]; - break; - - case LYRQueryControllerChangeTypeDelete: - [self.collectionView deleteSections:[NSIndexSet indexSetWithIndex:change.currentIndex]]; - break; - - case LYRQueryControllerChangeTypeUpdate: - // If we call reloadSections: for a section that is already being animated due to another move (e.g. moving section 17 to 16 causes section 16 to be moved/animated to 17 and then we also reload section 16), UICollectionView will throw an exception. But since all onscreen sections will be reconfigured (see below) we don't need to reload the sections here anyway. - break; - - default: - break; - } - } - [self.objectChanges removeAllObjects]; - } completion:nil]; + // ensure the animation's queue will resume + if (self.collectionView) { + dispatch_suspend(self.animationQueue); + [self.collectionView performBatchUpdates:^{ + for (ATLDataSourceChange *change in objectChanges) { + switch (change.type) { + case LYRQueryControllerChangeTypeInsert: + [self.collectionView insertSections:[NSIndexSet indexSetWithIndex:change.newIndex]]; + break; + + case LYRQueryControllerChangeTypeMove: + [self.collectionView moveSection:change.currentIndex toSection:change.newIndex]; + break; + + case LYRQueryControllerChangeTypeDelete: + [self.collectionView deleteSections:[NSIndexSet indexSetWithIndex:change.currentIndex]]; + break; + + case LYRQueryControllerChangeTypeUpdate: + // If we call reloadSections: for a section that is already being animated due to another move (e.g. 
moving section 17 to 16 causes section 16 to be moved/animated to 17 and then we also reload section 16), UICollectionView will throw an exception. But since all onscreen sections will be reconfigured (see below) we don't need to reload the sections here anyway. + break; + + default: + break; + } + } + } completion:^(BOOL finished) { + dispatch_resume(self.animationQueue); + }]; + } [self configureCollectionViewElements]; if (shouldScrollToBottom) { @@ -1104,14 +1266,43 @@ - (void)configureCollectionViewElements } } +- (void)configureCollectionViewElementsAtCollectionViewIndexPath:(NSIndexPath *)collectionViewIndexPath { + // Direct access to the message + LYRMessage *message = [self.conversationDataSource messageAtCollectionViewIndexPath:collectionViewIndexPath]; + UICollectionViewCell *cell = [self.collectionView cellForItemAtIndexPath:collectionViewIndexPath]; + if ([cell conformsToProtocol:@protocol(ATLMessagePresenting)]) { + [self configureCell:(UICollectionViewCell *)cell forMessage:message indexPath:collectionViewIndexPath]; + } + + // Find the header... 
+ for (ATLConversationCollectionViewHeader *header in self.sectionHeaders) { + NSIndexPath *queryControllerIndexPath = [self.conversationDataSource.queryController indexPathForObject:header.message]; + if (queryControllerIndexPath && [header.message.identifier isEqual:message.identifier]) { + NSIndexPath *collectionViewIndexPath = [self.conversationDataSource collectionViewIndexPathForQueryControllerIndexPath:queryControllerIndexPath]; + [self configureHeader:header atIndexPath:collectionViewIndexPath]; + break; + } + } + + // ...and the footer + for (ATLConversationCollectionViewFooter *footer in self.sectionFooters) { + NSIndexPath *queryControllerIndexPath = [self.conversationDataSource.queryController indexPathForObject:footer.message]; + if (queryControllerIndexPath && [footer.message.identifier isEqual:message.identifier]) { + NSIndexPath *collectionViewIndexPath = [self.conversationDataSource collectionViewIndexPathForQueryControllerIndexPath:queryControllerIndexPath]; + [self configureFooter:footer atIndexPath:collectionViewIndexPath]; + break; + } + } +} + #pragma mark - Helpers - (LYRConversation *)existingConversationWithParticipantIdentifiers:(NSSet *)participantIdentifiers { NSMutableSet *set = [participantIdentifiers mutableCopy]; [set addObject:self.layerClient.authenticatedUserID]; - LYRQuery *query = [LYRQuery queryWithClass:[LYRConversation class]]; - query.predicate = [LYRPredicate predicateWithProperty:@"participants" operator:LYRPredicateOperatorIsEqualTo value:set]; + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRConversation class]]; + query.predicate = [LYRPredicate predicateWithProperty:@"participants" predicateOperator:LYRPredicateOperatorIsEqualTo value:set]; query.limit = 1; return [self.layerClient executeQuery:query error:nil].lastObject; } @@ -1129,15 +1320,20 @@ - (NSOrderedSet *)participantsForIdentifiers:(NSOrderedSet *)identifiers - (NSString *)participantNameForMessage:(LYRMessage *)message { - id participant = [self 
participantForIdentifier:message.sentByUserID]; - NSString *participantName = participant.fullName ?: @"Unknown User"; + NSString *participantName; + if (message.sender.userID) { + id participant = [self participantForIdentifier:message.sender.userID]; + participantName = participant.fullName ?: ATLLocalizedString(@"atl.conversation.participant.unknown.key", @"Unknown User", nil); + } else { + participantName = message.sender.name; + } return participantName; } #pragma mark - NSNotification Center Registration - (void)atl_registerForNotifications -{ +{ // Layer Notifications [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveTypingIndicator:) name:LYRConversationDidReceiveTypingIndicatorNotification object:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerClientObjectsDidChange:) name:LYRClientObjectsDidChangeNotification object:nil]; diff --git a/Code/Controllers/ATLParticipantTableViewController.m b/Code/Controllers/ATLParticipantTableViewController.m index 319b8711b..e87e4c88a 100644 --- a/Code/Controllers/ATLParticipantTableViewController.m +++ b/Code/Controllers/ATLParticipantTableViewController.m @@ -23,6 +23,7 @@ #import "ATLParticipantSectionHeaderView.h" #import "ATLConstants.h" #import "ATLAvatarImageView.h" +#import "ATLMessagingUtilities.h" static NSString *const ATLParticipantTableSectionHeaderIdentifier = @"ATLParticipantTableSectionHeaderIdentifier"; static NSString *const ATLParticipantCellIdentifier = @"ATLParticipantCellIdentifier"; @@ -32,15 +33,20 @@ @interface ATLParticipantTableViewController () 0 && undisplayedCount == 1) { - [textForUndisplayedParticipants appendString:@" other"]; + [textForUndisplayedParticipants appendString:ATLLocalizedString(@"atl.typingindicator.spaces.other.key", @" other", nil)]; } else if (displayedFirstNamesCount > 0) { - [textForUndisplayedParticipants appendString:@" others"]; + [textForUndisplayedParticipants 
appendString:ATLLocalizedString(@"atl.typingindicator.spaces.others.key", @" others", nil)]; } [strings addObject:textForUndisplayedParticipants]; @@ -141,18 +141,18 @@ - (NSString *)typingIndicatorTextWithParticipantStrings:(NSArray *)participantSt NSUInteger lastIndex = participantStrings.count - 1; [participantStrings enumerateObjectsUsingBlock:^(NSString *participantString, NSUInteger index, BOOL *stop) { if (index == lastIndex && participantStrings.count == 2) { - [text appendString:@" and "]; + [text appendString:ATLLocalizedString(@"atl.typingindicator.spaces.and.key", @" and ", nil)]; } else if (index == lastIndex && participantStrings.count > 2) { - [text appendString:@", and "]; + [text appendString:ATLLocalizedString(@"atl.typingindicator.spaces.comma.and.key", @", and ", nil)]; } else if (index > 0) { - [text appendString:@", "]; + [text appendString:ATLLocalizedString(@"atl.typingindicator.spaces.comma.key", @", ", nil)]; } [text appendString:participantString]; }]; if (participantsCount == 1) { - [text appendString:@" is typing…"]; + [text appendString:ATLLocalizedString(@"atl.typingindicator.istyping.key", @" is typing…", nil)]; } else { - [text appendString:@" are typing…"]; + [text appendString:ATLLocalizedString(@"atl.typingindicator.aretyping.key", @" are typing…", nil)]; } return text; } @@ -164,13 +164,14 @@ - (BOOL)typingIndicatorLabelHasSpaceForText:(NSString *)text return fittedSize.width <= CGRectGetWidth(label.frame); } - - (void)configureToLabelConstraints { [self.view addConstraint:[NSLayoutConstraint constraintWithItem:_label attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeLeft multiplier:1.0 constant:8]]; - [self.view addConstraint:[NSLayoutConstraint constraintWithItem:_label attribute:NSLayoutAttributeRight relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeRight multiplier:1.0 constant:-8]]; [self.view addConstraint:[NSLayoutConstraint 
constraintWithItem:_label attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeTop multiplier:1.0 constant:0]]; [self.view addConstraint:[NSLayoutConstraint constraintWithItem:_label attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeHeight multiplier:1.0 constant:0]]; + NSLayoutConstraint *rightConstraint = [NSLayoutConstraint constraintWithItem:_label attribute:NSLayoutAttributeRight relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeRight multiplier:1.0 constant:-8]; + rightConstraint.priority = 800; + [self.view addConstraint:rightConstraint]; } @end diff --git a/Code/Models/ATLConversationDataSource.h b/Code/Models/ATLConversationDataSource.h index 0f447be09..4ab2dbf98 100644 --- a/Code/Models/ATLConversationDataSource.h +++ b/Code/Models/ATLConversationDataSource.h @@ -40,10 +40,10 @@ extern NSInteger const ATLNumberOfSectionsBeforeFirstMessageSection; /** @abstract Creates and returns an `ATLConversationDataSource` object. @param layerClient An `LYRClient` object used to initialize the `queryController` property. - @param conversation An `LYRConversation` object used in the predicate of the `queryController` property's `LYRQuery`. + @param query An `LYRQuery` object used as the query for the `queryController` property. @return An `ATLConversationDataSource` object. */ -+ (instancetype)dataSourceWithLayerClient:(LYRClient *)layerClient conversation:(LYRConversation *)conversation; ++ (instancetype)dataSourceWithLayerClient:(LYRClient *)layerClient query:(LYRQuery *)query; /** @abstract The `LYRQueryController` object managing data displayed in the `ATLConversationViewController`. 
diff --git a/Code/Models/ATLConversationDataSource.m b/Code/Models/ATLConversationDataSource.m index 457a497e9..3dd482fb3 100644 --- a/Code/Models/ATLConversationDataSource.m +++ b/Code/Models/ATLConversationDataSource.m @@ -31,26 +31,27 @@ @implementation ATLConversationDataSource NSInteger const ATLNumberOfSectionsBeforeFirstMessageSection = 1; NSInteger const ATLQueryControllerPaginationWindow = 30; -+ (instancetype)dataSourceWithLayerClient:(LYRClient *)layerClient conversation:(LYRConversation *)conversation ++ (instancetype)dataSourceWithLayerClient:(LYRClient *)layerClient query:(LYRQuery *)query { - return [[self alloc] initWithLayerClient:layerClient conversation:conversation]; + return [[self alloc] initWithLayerClient:layerClient query:query]; } -- (id)initWithLayerClient:(LYRClient *)layerClient conversation:(LYRConversation *)conversation +- (id)initWithLayerClient:(LYRClient *)layerClient query:(LYRQuery *)query { self = [super init]; if (self) { - LYRQuery *query = [LYRQuery queryWithClass:[LYRMessage class]]; - query.predicate = [LYRPredicate predicateWithProperty:@"conversation" operator:LYRPredicateOperatorIsEqualTo value:conversation]; - query.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"index" ascending:YES]]; - NSUInteger numberOfMessagesAvailable = [layerClient countForQuery:query error:nil]; NSUInteger numberOfMessagesToDisplay = MIN(numberOfMessagesAvailable, ATLQueryControllerPaginationWindow); - _queryController = [layerClient queryControllerWithQuery:query]; + NSError *error = nil; + _queryController = [layerClient queryControllerWithQuery:query error:&error]; + if (!_queryController) { + NSLog(@"LayerKit failed to create a query controller with error: %@", error); + return nil; + } _queryController.updatableProperties = [NSSet setWithObjects:@"parts.transferStatus", @"recipientStatusByUserID", @"sentAt", nil]; _queryController.paginationWindow = -numberOfMessagesToDisplay; - NSError *error = nil; + BOOL success = 
[_queryController execute:&error]; if (!success) NSLog(@"LayerKit failed to execute query with error: %@", error); } @@ -60,10 +61,16 @@ - (id)initWithLayerClient:(LYRClient *)layerClient conversation:(LYRConversation - (void)expandPaginationWindow { self.expandingPaginationWindow = YES; - if (!self.queryController) return; + if (!self.queryController) { + self.expandingPaginationWindow = NO; + return; + } BOOL moreMessagesAvailable = self.queryController.totalNumberOfObjects > ABS(self.queryController.paginationWindow); - if (!moreMessagesAvailable) return; + if (!moreMessagesAvailable) { + self.expandingPaginationWindow = NO; + return; + } NSUInteger numberOfMessagesToDisplay = MIN(-self.queryController.paginationWindow + ATLQueryControllerPaginationWindow, self.queryController.totalNumberOfObjects); self.queryController.paginationWindow = -numberOfMessagesToDisplay; diff --git a/Code/Models/ATLMediaAttachment.h b/Code/Models/ATLMediaAttachment.h index 54589de46..22a377068 100644 --- a/Code/Models/ATLMediaAttachment.h +++ b/Code/Models/ATLMediaAttachment.h @@ -36,7 +36,12 @@ typedef NS_ENUM(NSUInteger, ATLMediaAttachmentType) { @constant Media attachment containing image data. @discussion Sets mediaMIMEType = @"image/jpeg"; thumbnailMIMEType = @"image/jpeg+preview"; metadataMIMEType = @"application/json+imageSize"; textRepresentation = @"Attachment: Image"; */ - ATLMediaAttachmentTypeImage + ATLMediaAttachmentTypeImage, + /** + @constant Media attachment containing video data. + @discussion Sets mediaMIMEType = @"video/mp4"; thumbnailMIMEType = @"image/jpeg+preview"; metadataMIMEType = @"application/json+imageSize"; textRepresentation = @"Attachment: Video"; + */ + ATLMediaAttachmentTypeVideo }; /** @abstract Creates a new `ATLMediaAttachment` instance of type `ATLMediaAttachmentTypeImage` based on Apple's Photo Library's `ALAsset` URL identifier. @param assetURL URL path of the media asset. 
@param thumbnailSize The size of the thumbnail. - @return Instance of `ATLMediaAttachment` containing streams. + @return Instance of `ATLMediaAttachment` containing the streams. */ + (instancetype)mediaAttachmentWithAssetURL:(NSURL *)assetURL thumbnailSize:(NSUInteger)thumbnailSize; @@ -63,10 +68,18 @@ typedef NS_ENUM(NSUInteger, ATLMediaAttachmentType) { @param assetURL Image in a form of `UIImage`. @param metadata The metadata that will be attached to the image content (such as EXIF). @param thumbnailSize The size of the thumbnail. - @return Instance of `ATLMediaAttachment` containing streams. + @return Instance of `ATLMediaAttachment` containing the streams. */ + (instancetype)mediaAttachmentWithImage:(UIImage *)image metadata:(NSDictionary *)metadata thumbnailSize:(NSUInteger)thumbnailSize; +/** + @abstract Creates a new `ATLMediaAttachment` instance either of type `ATLMediaAttachmentTypeImage` or `ATLMediaAttachmentTypeVideo`, depending on the input file. + @param fileURL File path in a form of `NSURL`. + @param thumbnailSize The size of the thumbnail. + @return Instance of `ATLMediaAttachment` containing the streams. + */ ++ (instancetype)mediaAttachmentWithFileURL:(NSURL *)fileURL thumbnailSize:(NSUInteger)thumbnailSize; + /** @abstract Creates a new `ATLMediaAttachment` instance of type `ATLMediaAttachmentTypeText` based on `NSString` text. @param text Text in a form of `NSString`. @@ -98,6 +111,11 @@ typedef NS_ENUM(NSUInteger, ATLMediaAttachmentType) { /// @name Consumable Attributes ///---------------------------- +/** + @abstract Maximum size of thumbnail when presented in input toolbar + */ +@property (nonatomic, assign) CGSize maximumInputSize; + /** @abstract A text representation of the media, useful for push alert texts or cells that don't display media items (like conversation list view). @see `ATLMediaAttachmentType` what `textRepresentation` contains for different media attachment types. 
diff --git a/Code/Models/ATLMediaAttachment.m b/Code/Models/ATLMediaAttachment.m index a4e339177..a18b3d7b0 100644 --- a/Code/Models/ATLMediaAttachment.m +++ b/Code/Models/ATLMediaAttachment.m @@ -22,6 +22,7 @@ #import "ATLMessagingUtilities.h" #import "ATLMediaInputStream.h" #import +#import /** @abstract Fetches the ALAsset from library based on given `assetURL`. @@ -30,8 +31,32 @@ @return An `ALAsset` if successfully retrieved from asset library, otherwise `nil`. */ ALAsset *ATLMediaAttachmentFromAssetURL(NSURL *assetURL, ALAssetsLibrary *assetLibrary); + +/** + @abstract A helper function that streams data straight from an NSInputStream + into the NSData. + @param inputStream The `NSInputStream` where the data will be consumed from. + @return An `NSData` object with data. + */ NSData *ATLMediaAttachmentDataFromInputStream(NSInputStream *inputStream); +/** + @abstract Generates a thumbnail from the desired video by taking a still + snapshot from a frame located at the first second in the video. + @param fileURL File path of the video asset in a form of an `NSURL` + @return Returns a thumbnail image in a form of a `UIImage`; in case of a + failure, function returns `nil`. + */ +UIImage *ATLMediaAttachmentGenerateThumbnailFromVideoFileURL(NSURL *videoFileURL); + +/** + @abstract Extracts the video orientation based on assetTrack's affine transform. + @param assetTrack The `AVAssetTrack` for which to extract the video orientation from. + @return Orientation information in a form of `UIImageOrientation`. 
+ */ +UIImageOrientation ATLMediaAttachmentVideoOrientationForAVAssetTrack(AVAssetTrack *assetVideoTrack); + +static int const ATLMediaAttachmentTIFFOrientationToImageOrientationMap[9] = { 0, 0, 6, 1, 5, 4, 4, 7, 2 }; static char const ATLMediaAttachmentAsyncToBlockingQueueName[] = "com.layer.Atlas.ATLMediaAttachment.blocking"; static NSUInteger const ATLMediaAttachmentDataFromStreamBufferSize = 1024 * 1024; static float const ATLMediaAttachmentDefaultThumbnailJPEGCompression = 0.5f; @@ -58,6 +83,7 @@ @interface ATLAssetMediaAttachment : ATLMediaAttachment @property (nonatomic) NSURL *inputAssetURL; - (instancetype)initWithAssetURL:(NSURL *)assetURL thumbnailSize:(NSUInteger)thumbnailSize; +- (instancetype)initWithFileURL:(NSURL *)fileURL thumbnailSize:(NSUInteger)thumbnailSize; @end @@ -90,7 +116,7 @@ - (instancetype)initWithAssetURL:(NSURL *)assetURL thumbnailSize:(NSUInteger)thu self = [super init]; if (self) { if (!assetURL) { - @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` assetURL.", self.class] userInfo:nil]; + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` assetURL.", self.superclass] userInfo:nil]; } _inputAssetURL = assetURL; self.thumbnailSize = thumbnailSize; @@ -111,22 +137,40 @@ - (instancetype)initWithAssetURL:(NSURL *)assetURL thumbnailSize:(NSUInteger)thu // Prepare the input stream and MIMEType for the full size media. 
// -------------------------------------------------------------------- self.mediaInputStream = [ATLMediaInputStream mediaInputStreamWithAssetURL:asset.defaultRepresentation.url]; - self.mediaMIMEType = (__bridge NSString *)(UTTypeCopyPreferredTagWithClass((__bridge CFStringRef)(asset.defaultRepresentation.UTI), kUTTagClassMIMEType)); + + if ( [assetType isEqualToString:ALAssetTypeVideo]) { + self.mediaMIMEType = ATLMIMETypeVideoMP4; + }else { + self.mediaMIMEType = (__bridge NSString *)(UTTypeCopyPreferredTagWithClass((__bridge CFStringRef)(asset.defaultRepresentation.UTI), kUTTagClassMIMEType)); + } // -------------------------------------------------------------------- // Prepare the input stream and MIMEType for the thumbnail. // -------------------------------------------------------------------- - self.thumbnailInputStream = [ATLMediaInputStream mediaInputStreamWithAssetURL:asset.defaultRepresentation.url]; - ((ATLMediaInputStream *)self.thumbnailInputStream).maximumSize = thumbnailSize; - ((ATLMediaInputStream *)self.thumbnailInputStream).compressionQuality = ATLMediaAttachmentDefaultThumbnailJPEGCompression; - self.thumbnailMIMEType = ATLMIMETypeImageJPEGPreview; + if ([self.mediaMIMEType isEqualToString:ATLMIMETypeImageGIF]) { + self.thumbnailInputStream = [ATLMediaInputStream mediaInputStreamWithAssetURL:asset.defaultRepresentation.url]; + ((ATLMediaInputStream *)self.thumbnailInputStream).maximumSize = ATLDefaultGIFThumbnailSize; + self.thumbnailMIMEType = ATLMIMETypeImageGIFPreview; + } else if ([self.mediaMIMEType isEqualToString:ATLMIMETypeVideoMP4]) { + UIImage *image = ATLMediaAttachmentGenerateThumbnailFromVideoFileURL(assetURL); + self.thumbnailInputStream = [ATLMediaInputStream mediaInputStreamWithImage:image metadata:nil]; + ((ATLMediaInputStream *)self.thumbnailInputStream).maximumSize = thumbnailSize; + ((ATLMediaInputStream *)self.thumbnailInputStream).compressionQuality = ATLMediaAttachmentDefaultThumbnailJPEGCompression; + 
self.thumbnailMIMEType = ATLMIMETypeImageJPEGPreview; + } else { + self.thumbnailInputStream = [ATLMediaInputStream mediaInputStreamWithAssetURL:asset.defaultRepresentation.url]; + ((ATLMediaInputStream *)self.thumbnailInputStream).maximumSize = thumbnailSize; + ((ATLMediaInputStream *)self.thumbnailInputStream).compressionQuality = ATLMediaAttachmentDefaultThumbnailJPEGCompression; + self.thumbnailMIMEType = ATLMIMETypeImageJPEGPreview; + } // -------------------------------------------------------------------- // Prepare the input stream and MIMEType for the metadata // about the asset. // -------------------------------------------------------------------- NSDictionary *imageMetadata = @{ @"width": @(asset.defaultRepresentation.dimensions.width), - @"height": @(asset.defaultRepresentation.dimensions.height) }; + @"height": @(asset.defaultRepresentation.dimensions.height), + @"orientation": @(asset.defaultRepresentation.orientation) }; NSError *JSONSerializerError; NSData *JSONData = [NSJSONSerialization dataWithJSONObject:imageMetadata options:NSJSONWritingPrettyPrinted error:&JSONSerializerError]; if (JSONData) { @@ -145,13 +189,129 @@ - (instancetype)initWithAssetURL:(NSURL *)assetURL thumbnailSize:(NSUInteger)thu // -------------------------------------------------------------------- // Set the type - public property. 
// -------------------------------------------------------------------- - if ([assetType isEqualToString:ALAssetTypePhoto]) { + if ([assetType isEqualToString:ALAssetTypePhoto] ) { self.mediaType = ATLMediaAttachmentTypeImage; + self.textRepresentation = @"Attachment: Image"; + } else if ([assetType isEqualToString:ALAssetTypeVideo]) { + self.mediaType = ATLMediaAttachmentTypeVideo; + self.textRepresentation = @"Attachment: Video"; } else { return nil; } + } + return self; +} + +- (instancetype)initWithFileURL:(NSURL *)fileURL thumbnailSize:(NSUInteger)thumbnailSize +{ + if (![[NSFileManager defaultManager] fileExistsAtPath:[fileURL path]]) { + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@. File not found at path='%@'.", self.superclass, fileURL] userInfo:nil]; + } + + // -------------------------------------------------------------------- + // Figure out the type of the media from the file extension. + // -------------------------------------------------------------------- + UIImage *thumbnailImage; + CFStringRef fileExtension = (__bridge CFStringRef)[fileURL pathExtension]; + CFStringRef fileUTI = UTTypeCreatePreferredIdentifierForTag(kUTTagClassFilenameExtension, fileExtension, NULL); + if (!(UTTypeConformsTo(fileUTI, kUTTypeImage) || UTTypeConformsTo(fileUTI, kUTTypeVideo) || UTTypeConformsTo(fileUTI, kUTTypeQuickTimeMovie))) { + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@. Unsupported MIMEtype='%@'.", self.superclass, (__bridge NSString *)UTTypeCopyDescription(fileUTI)] userInfo:nil]; + } + + // -------------------------------------------------------------------- + // Prepare the input stream and MIMEType for the full size media. 
+ // -------------------------------------------------------------------- + if (UTTypeConformsTo(fileUTI, kUTTypeImage)) { + self.mediaMIMEType = (__bridge NSString *)(UTTypeCopyPreferredTagWithClass(fileUTI, kUTTagClassMIMEType)); + } else if (UTTypeConformsTo(fileUTI, kUTTypeVideo) || UTTypeConformsTo(fileUTI, kUTTypeQuickTimeMovie)) { + self.mediaMIMEType = ATLMIMETypeVideoMP4; + } + self.mediaInputStream = [ATLMediaInputStream mediaInputStreamWithFileURL:fileURL]; + + // -------------------------------------------------------------------- + // Prepare the input stream and MIMEType for the thumbnail. + // -------------------------------------------------------------------- + if (UTTypeConformsTo(fileUTI, kUTTypeImage)) { + self.thumbnailInputStream = [ATLMediaInputStream mediaInputStreamWithFileURL:fileURL]; + self.thumbnailMIMEType = ATLMIMETypeImageJPEGPreview; + } else if (UTTypeConformsTo(fileUTI, kUTTypeVideo) || UTTypeConformsTo(fileUTI, kUTTypeQuickTimeMovie)) { + if ((UTTypeConformsTo(fileUTI, kUTTypeVideo) || UTTypeConformsTo(fileUTI, kUTTypeQuickTimeMovie))) { + thumbnailImage = ATLMediaAttachmentGenerateThumbnailFromVideoFileURL(fileURL); + } + self.thumbnailInputStream = [ATLMediaInputStream mediaInputStreamWithImage:thumbnailImage metadata:nil]; + self.thumbnailMIMEType = ATLMIMETypeImageJPEGPreview; + } + ((ATLMediaInputStream *)self.thumbnailInputStream).maximumSize = thumbnailSize; + ((ATLMediaInputStream *)self.thumbnailInputStream).compressionQuality = ATLMediaAttachmentDefaultThumbnailJPEGCompression; + + // -------------------------------------------------------------------- + // Prepare the input stream and MIMEType for the metadata information + // about the asset (dimension and orientation). + // -------------------------------------------------------------------- + CGSize mediaDimensions = CGSizeZero; + UIImageOrientation mediaOrientation = UIImageOrientationUp; + if (UTTypeConformsTo(fileUTI, kUTTypeImage)) { + // In case it's an image. 
+ CGDataProviderRef providerRef = CGDataProviderCreateWithURL((CFURLRef)fileURL); + CGImageSourceRef imageSourceRef = CGImageSourceCreateWithDataProvider(providerRef, NULL); + NSDictionary *dict = (__bridge NSDictionary *)(CGImageSourceCopyPropertiesAtIndex(imageSourceRef, 0, NULL)); + CGDataProviderRelease(providerRef); + CFRelease(imageSourceRef); + mediaDimensions.width = [dict[(NSString *)kCGImagePropertyPixelWidth] integerValue]; + mediaDimensions.height = [dict[(NSString *)kCGImagePropertyPixelHeight] integerValue]; + int CGImageTIFFOrientation = [dict[(NSString *)kCGImagePropertyTIFFOrientation] intValue]; + mediaOrientation = ATLMediaAttachmentTIFFOrientationToImageOrientationMap[CGImageTIFFOrientation]; + } else if (UTTypeConformsTo(fileUTI, kUTTypeVideo) || UTTypeConformsTo(fileUTI, kUTTypeQuickTimeMovie)) { + // Or if it's a video. + AVAsset *videoAsset = [AVAsset assetWithURL:fileURL]; + AVAssetTrack *firstVideoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject]; + mediaDimensions = firstVideoAssetTrack.naturalSize; + mediaOrientation = ATLMediaAttachmentVideoOrientationForAVAssetTrack(firstVideoAssetTrack); + if (mediaOrientation == UIImageOrientationUp || mediaOrientation == UIImageOrientationDown) { + // Flip the media dimension. 
+ mediaDimensions = CGSizeMake(mediaDimensions.height, mediaDimensions.width); + } + } + + NSDictionary *mediaMetadata = @{ @"width": @(mediaDimensions.width), + @"height": @(mediaDimensions.height), + @"orientation": @(mediaOrientation) }; + NSError *JSONSerializerError; + NSData *JSONData = [NSJSONSerialization dataWithJSONObject:mediaMetadata options:NSJSONWritingPrettyPrinted error:&JSONSerializerError]; + if (JSONData) { + self.metadataInputStream = [NSInputStream inputStreamWithData:JSONData]; + self.metadataMIMEType = ATLMIMETypeImageSize; + } else { + NSLog(@"ATLMediaAttachment failed to generate a JSON object for image metadata"); + } + + // -------------------------------------------------------------------- + // Prepare the attachable thumbnail meant for the UI (which is inlined + // with text in the message composer). + // + // Since we got the full resolution UIImage, we need to create a + // thumbnail size in the initializer. + // -------------------------------------------------------------------- + ATLMediaInputStream *attachableThumbnailInputStream; + if (UTTypeConformsTo(fileUTI, kUTTypeImage)) { + attachableThumbnailInputStream = [ATLMediaInputStream mediaInputStreamWithFileURL:fileURL]; + } else if (UTTypeConformsTo(fileUTI, kUTTypeVideo) || UTTypeConformsTo(fileUTI, kUTTypeQuickTimeMovie)) { + attachableThumbnailInputStream = [ATLMediaInputStream mediaInputStreamWithImage:thumbnailImage metadata:nil]; + } + + attachableThumbnailInputStream.maximumSize = thumbnailSize; + attachableThumbnailInputStream.compressionQuality = ATLMediaAttachmentDefaultThumbnailJPEGCompression; + NSData *resampledImageData = ATLMediaAttachmentDataFromInputStream(attachableThumbnailInputStream); + self.attachableThumbnailImage = [UIImage imageWithData:resampledImageData scale:thumbnailImage.scale]; + + self.thumbnailSize = thumbnailSize; + if (UTTypeConformsTo(fileUTI, kUTTypeImage)) { + self.mediaType = ATLMediaAttachmentTypeImage; self.textRepresentation = 
@"Attachment: Image"; + } else if (UTTypeConformsTo(fileUTI, kUTTypeVideo) || UTTypeConformsTo(fileUTI, kUTTypeQuickTimeMovie)) { + self.mediaType = ATLMediaAttachmentTypeVideo; + self.textRepresentation = @"Attachment: Video"; } return self; } @@ -165,7 +325,7 @@ - (instancetype)initWithImage:(UIImage *)image metadata:(NSDictionary *)metadata self = [super init]; if (self) { if (!image) { - @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` image.", self.class] userInfo:nil]; + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` image.", self.superclass] userInfo:nil]; } self.inputImage = image; @@ -174,7 +334,7 @@ - (instancetype)initWithImage:(UIImage *)image metadata:(NSDictionary *)metadata // -------------------------------------------------------------------- self.mediaInputStream = [ATLMediaInputStream mediaInputStreamWithImage:image metadata:metadata]; self.mediaMIMEType = ATLMIMETypeImageJPEG; - + // -------------------------------------------------------------------- // Prepare the input stream and MIMEType for the thumbnail. // -------------------------------------------------------------------- @@ -182,13 +342,14 @@ - (instancetype)initWithImage:(UIImage *)image metadata:(NSDictionary *)metadata ((ATLMediaInputStream *)self.thumbnailInputStream).maximumSize = thumbnailSize; ((ATLMediaInputStream *)self.thumbnailInputStream).compressionQuality = ATLMediaAttachmentDefaultThumbnailJPEGCompression; self.thumbnailMIMEType = ATLMIMETypeImageJPEGPreview; - + // -------------------------------------------------------------------- // Prepare the input stream and MIMEType for the metadata // about the asset. 
// -------------------------------------------------------------------- NSDictionary *imageMetadata = @{ @"width": @(image.size.width), - @"height": @(image.size.height) }; + @"height": @(image.size.height), + @"orientation": @(image.imageOrientation) }; NSError *JSONSerializerError; NSData *JSONData = [NSJSONSerialization dataWithJSONObject:imageMetadata options:NSJSONWritingPrettyPrinted error:&JSONSerializerError]; if (JSONData) { @@ -197,7 +358,7 @@ - (instancetype)initWithImage:(UIImage *)image metadata:(NSDictionary *)metadata } else { NSLog(@"ATLMediaAttachment failed to generate a JSON object for image metadata"); } - + // -------------------------------------------------------------------- // Prepare the attachable thumbnail meant for the UI (which is inlined // with text in the message composer). @@ -230,12 +391,12 @@ - (instancetype)initWithLocation:(CLLocation *)location self = [super init]; if (self) { if (!location) { - @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` location.", self.class] userInfo:nil]; + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` location.", self.superclass] userInfo:nil]; } self.mediaType = ATLMediaAttachmentTypeLocation; self.mediaMIMEType = ATLMIMETypeLocation; NSData *data = [NSJSONSerialization dataWithJSONObject:@{ ATLLocationLatitudeKey: @(location.coordinate.latitude), - ATLLocationLongitudeKey: @(location.coordinate.longitude) } options:0 error:nil]; + ATLLocationLongitudeKey: @(location.coordinate.longitude) } options:0 error:nil]; self.mediaInputStream = [NSInputStream inputStreamWithData:data]; self.textRepresentation = @"Attachment: Location"; } @@ -251,7 +412,7 @@ - (instancetype)initWithText:(NSString *)text self = [super init]; if (self) { if (!text) { - @throw [NSException exceptionWithName:NSInternalInconsistencyException 
reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` text.", self.class] userInfo:nil]; + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` text.", self.superclass] userInfo:nil]; } self.mediaType = ATLMediaAttachmentTypeText; self.mediaMIMEType = ATLMIMETypeTextPlain; @@ -272,6 +433,11 @@ + (instancetype)mediaAttachmentWithAssetURL:(NSURL *)assetURL thumbnailSize:(NSU return [[ATLAssetMediaAttachment alloc] initWithAssetURL:assetURL thumbnailSize:thumbnailSize]; } ++ (instancetype)mediaAttachmentWithFileURL:(NSURL *)fileURL thumbnailSize:(NSUInteger)thumbnailSize +{ + return [[ATLAssetMediaAttachment alloc] initWithFileURL:fileURL thumbnailSize:thumbnailSize]; +} + + (instancetype)mediaAttachmentWithImage:(UIImage *)image metadata:(NSDictionary *)metadata thumbnailSize:(NSUInteger)thumbnailSize; { return [[ATLImageMediaAttachment alloc] initWithImage:image metadata:(NSDictionary *)metadata thumbnailSize:thumbnailSize]; @@ -287,6 +453,17 @@ + (instancetype)mediaAttachmentWithLocation:(CLLocation *)location return [[ATLLocationMediaAttachment alloc] initWithLocation:location]; } +- (instancetype)init +{ + self = [super init]; + if (self) { + if ([[self class] isEqual:[ATLMediaAttachment class]]) { + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Failed to call designated initializer. 
Use one of the following initialiers: %@", [@[ NSStringFromSelector(@selector(mediaAttachmentWithAssetURL:thumbnailSize:)), NSStringFromSelector(@selector(mediaAttachmentWithImage:metadata:thumbnailSize:)), NSStringFromSelector(@selector(mediaAttachmentWithText:)), NSStringFromSelector(@selector(mediaAttachmentWithLocation:)) ] componentsJoinedByString:@", "]] userInfo:nil]; + } + } + return self; +} + #pragma mark - NSTextAttachment Overrides - (UIImage *)image @@ -297,7 +474,7 @@ - (UIImage *)image - (CGRect)attachmentBoundsForTextContainer:(NSTextContainer *)textContainer proposedLineFragment:(CGRect)lineFrag glyphPosition:(CGPoint)position characterIndex:(NSUInteger)charIndex { CGRect systemImageRect = [super attachmentBoundsForTextContainer:textContainer proposedLineFragment:lineFrag glyphPosition:position characterIndex:charIndex]; - return ATLImageRectConstrainedToSize(systemImageRect.size, CGSizeMake(150, 150)); + return ATLImageRectConstrainedToSize(systemImageRect.size, CGSizeEqualToSize(_maximumInputSize, CGSizeZero) ? CGSizeMake(150, 150) : _maximumInputSize); } @end @@ -313,9 +490,24 @@ - (CGRect)attachmentBoundsForTextContainer:(NSTextContainer *)textContainer prop __block ALAsset *resultAsset; dispatch_async(asyncQueue, ^{ [assetLibrary assetForURL:assetURL resultBlock:^(ALAsset *asset) { - resultAsset = asset; - dispatch_semaphore_signal(semaphore); - } failureBlock:^(NSError *libraryError) { + if (asset){ + resultAsset = asset; + dispatch_semaphore_signal(semaphore); + } else { + // On iOS 8.1 [library assetForUrl] Photo Streams always returns nil. 
Try to obtain it in an alternative way + [assetLibrary enumerateGroupsWithTypes:ALAssetsGroupPhotoStream usingBlock:^(ALAssetsGroup *group, BOOL *stop) { + [group enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *result, NSUInteger index, BOOL *stop) { + if([result.defaultRepresentation.url isEqual:assetURL]) { + resultAsset = result; + *stop = YES; + dispatch_semaphore_signal(semaphore); + } + }]; + } failureBlock:^(NSError *error) { + dispatch_semaphore_signal(semaphore); + }]; + } + } failureBlock:^(NSError *error) { dispatch_semaphore_signal(semaphore); }]; }); @@ -329,7 +521,7 @@ - (CGRect)attachmentBoundsForTextContainer:(NSTextContainer *)textContainer prop @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"inputStream cannot be `nil`." userInfo:nil]; } NSMutableData *dataFromStream = [NSMutableData data]; - + // Open stream [inputStream open]; if (inputStream.streamError) { @@ -354,3 +546,40 @@ - (CGRect)attachmentBoundsForTextContainer:(NSTextContainer *)textContainer prop // Done return dataFromStream; } + +UIImage *ATLMediaAttachmentGenerateThumbnailFromVideoFileURL(NSURL *videoFileURL) +{ + AVURLAsset *URLasset = [[AVURLAsset alloc] initWithURL:videoFileURL options:nil]; + AVAssetImageGenerator *assetImageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:URLasset]; + assetImageGenerator.appliesPreferredTrackTransform = YES; + NSError *error = NULL; + AVAssetTrack *videoAssetTrack = [[URLasset tracksWithMediaType:AVMediaTypeVideo] firstObject]; + CMTime time; + if (videoAssetTrack) { + time = CMTimeMake(0, videoAssetTrack.nominalFrameRate); + } + CGImageRef imageRef = [assetImageGenerator copyCGImageAtTime:time actualTime:NULL error:&error]; + if (error) { + NSLog(@"Failed to create thumbnail!"); + } + UIImage *outputImage = [UIImage imageWithCGImage:imageRef]; + CGImageRelease(imageRef); + return outputImage; +} + +UIImageOrientation ATLMediaAttachmentVideoOrientationForAVAssetTrack(AVAssetTrack 
*assetVideoTrack) +{ + CGAffineTransform transform = assetVideoTrack.preferredTransform; + int videoAngleInDegrees = (int)((float)atan2(transform.b, transform.a) * (float)180 / (float)M_PI); + switch (videoAngleInDegrees) { + case 90: + return UIImageOrientationUp; + case 180: + return UIImageOrientationLeft; + case -90: + return UIImageOrientationDown; + default: + return UIImageOrientationRight; + } +} + diff --git a/Code/Protocols/ATLAvatarItem.h b/Code/Protocols/ATLAvatarItem.h index 663b1223c..a01c98808 100644 --- a/Code/Protocols/ATLAvatarItem.h +++ b/Code/Protocols/ATLAvatarItem.h @@ -22,11 +22,15 @@ /** @abstract Objects conforming to the `ATLAvatarItem` protocol will be used to display images or - initials in a `ATLAvatarImageView`. + initials in a `ATLAvatarImageView`. The objects can return `nil` if they do not want to use the + specific property. */ @protocol ATLAvatarItem -@optional +/** + @abstract Returns the image URL for an avatar image for the receiver. + */ +@property (nonatomic, readonly) NSURL *avatarImageURL; /** @abstract Returns the avatar image of the receiver. diff --git a/Code/Protocols/ATLConversationPresenting.h b/Code/Protocols/ATLConversationPresenting.h index 4d19e479f..a442d5a37 100644 --- a/Code/Protocols/ATLConversationPresenting.h +++ b/Code/Protocols/ATLConversationPresenting.h @@ -48,4 +48,10 @@ */ - (void)updateWithAvatarItem:(id)avatarItem; +/** + @abstract Provides a string to display representing the conversation's last message. + @param lastMessageText The last message text to display. 
+ */ +- (void)updateWithLastMessageText:(NSString *)lastMessageText; + @end diff --git a/Code/Utilities/ATLLocationManager.m b/Code/Utilities/ATLLocationManager.m index f022b1129..73d6bdcd3 100644 --- a/Code/Utilities/ATLLocationManager.m +++ b/Code/Utilities/ATLLocationManager.m @@ -7,6 +7,7 @@ // #import "ATLLocationManager.h" +#import "ATLMessagingUtilities.h" @interface ATLLocationManager () @@ -42,10 +43,10 @@ - (BOOL)locationServicesEnabled - (void)displayLocationEnablementAlert { - UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Location Access Required" - message:@"To share your location, enable location services for this app in the Privacy section of the Settings app." + UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:ATLLocalizedString(@"atl.locationmanager.alert.title.key", @"Location Access Required", nil) + message:ATLLocalizedString(@"atl.locationmanager.alert.message.key", @"To share your location, enable location services for this app in the Privacy section of the Settings app.", nil) delegate:nil - cancelButtonTitle:@"OK" + cancelButtonTitle:ATLLocalizedString(@"atl.locationmanager.alert.cancel.key", @"OK", nil) otherButtonTitles:nil]; [alertView show]; diff --git a/Code/Utilities/ATLMediaInputStream.h b/Code/Utilities/ATLMediaInputStream.h index 497095f64..9e4b7e8ce 100644 --- a/Code/Utilities/ATLMediaInputStream.h +++ b/Code/Utilities/ATLMediaInputStream.h @@ -40,62 +40,97 @@ typedef NS_ENUM(NSUInteger, ATLMediaInputStreamError) { @abstract An error to open stream if the source asset doesn't contain any items. */ ATLMediaInputStreamErrorAssetHasNoImages = 1003, + /** + @abstract An error to open stream when device doesn't have any compatible export presets. + */ + ATLMediaInputStreamErrorNoVideoExportPresetsAvailable = 1004, + /** + @abstract An error during video export process. 
+ */ + ATLMediaInputStreamErrorVideoExportFailed = 1005, }; /** @abstract The `ATLMediaInputStream` class is responsible for streaming - media content to the receiver. - - It provides direct (lossless) content streaming or resampled and compressed - image streaming. Depending on the input source, which can be either - an `ALAsset` URL or an `UIImage`, streaming, resampling and compression will - be performed without bringing the full image data into the memory. + media content to the receiver. - @discussion Compression and resampling are enabled with setting the - `compressionQuality` and `maximumSize` respectively. - - If setting the `maximumSize = 0` and `compressionQuality = 0.0f`, media content - will be directly transferred from the `ALAsset` or `UIImage`, depending - on the source. Property `isLossless` indicates if the streaming - will be lossless. + It provides direct (lossless) content streaming or resampled and compressed + image streaming. Depending on the input source, which can be either + an `ALAsset` URL, an `UIImage` or a direct file URL, streaming, resampling + and encoding will be performed without bringing the full media data + into the memory. + + @discussion Media encoding and resampling are enabled by setting the + `compressionQuality` and `maximumSize` respectively. + + If setting the `maximumSize = 0` and `compressionQuality = 0.0f`, media content + will be directly transferred from the `ALAsset`, `UIImage` or `fileURL`, + depending on the source. Property `isLossless` indicates the streaming + will be lossless. @warning `ATLMediaInputStream` is GCD based and doesn't utilize `NSRunLoops`. - It may be unrealiable, if paired with a network stream. + It may be unrealiable, if paired with a network stream. */ @interface ATLMediaInputStream : NSInputStream /** - @abstract Creates an input stream capable of direct streaming of an ALAsset's content. 
- @param assetURL `NSURL` path of the asset (URL starts with `asset://`) that will be serialized for streaming. + @abstract Creates an input stream capable of direct or re-encoded streaming + of an ALAsset's content. + @param assetURL `NSURL` path of the asset (URL starts with `asset://`) that + will be serialized for streaming. @return A `ATLMediaInputStream` instance ready to be open. */ + (instancetype)mediaInputStreamWithAssetURL:(NSURL *)assetURL; /** - @abstract Creates an input stream capable of direct streaming of the UIImage's content. + @abstract Creates an input stream capable of direct or re-encoded streaming + of the UIImage's content. @param image `UIImage` instance that will be serialized for streaming. - @param metadata A `NSDictionary` of metadata that will be attached in the serialized data. Passing `nil` won't attach any metadata to the serialized image. + @param metadata A `NSDictionary` of metadata that will be embedded into the + image. Passing `nil` won't embed any metadata information. @return A `ATLMediaInputStream` instance ready to be open. */ + (instancetype)mediaInputStreamWithImage:(UIImage *)image metadata:(NSDictionary *)metadata; +/** + @abstract Creates an input stream capable of direct or re-encoded media + streaming from the file system. + @param fileURL File URL path to the media content (URL starts with `file://`). + @return A `ATLMediaInputStream` instance ready to be open. + @discussion The input stream will attempt to preserve any embedded + metadata information of the media content. + */ ++ (instancetype)mediaInputStreamWithFileURL:(NSURL *)fileURL; + /** @abstract The source media asset in a form of an `NSURL`. + @discussion Set only when input stream is initialized with the `assetURL`, + otherwise it's `nil`. */ @property (nonatomic, readonly) NSURL *sourceAssetURL; /** - @abstract The source media in a form of an `UIImage`. + @abstract The source image in a form of an `UIImage`. 
+ @discussion Set only when input stream is initialized with the `image`, + otherwise it's `nil`. */ @property (nonatomic, readonly) UIImage *sourceImage; +/** + @abstract The source media file URL in a form of `NSURL`. + @discussion Set only when input stream is initialized with the `fileURL`, + otherwise it's `nil`. + */ +@property (nonatomic, readonly) NSURL *sourceFileURL; + /** @abstract A boolean value indicating if streaming is going to be lossless. */ @property (nonatomic, readonly) BOOL isLossless; /** - @abstract The size in pixels of the output image when being streamed. Default is set to 0. + @abstract The size in pixels of the output image when being streamed. + Default is set to 0. @discussion If set to zero `0`, resampling is disabled. */ @property (nonatomic) NSUInteger maximumSize; @@ -103,10 +138,9 @@ typedef NS_ENUM(NSUInteger, ATLMediaInputStreamError) { /** @abstract The compression quality in percent. Default is set to 0.0f. @discussion 1.0f sets the quality to 100% which preserves details in images, - but also makes a larger output. 0.1f sets the quality to 10% which - is the lowest quality, and makes the file size smaller. - - Setting the property value to zero `0.0f` will disable compression. + but also makes a larger output. 0.1f sets the quality to 10% which + is the lowest quality, and makes the file size smaller. + @note Setting the property value to zero `0.0f` will disable compression. */ @property (nonatomic) float compressionQuality; diff --git a/Code/Utilities/ATLMediaInputStream.m b/Code/Utilities/ATLMediaInputStream.m index 002dd391e..d17eeb5f3 100644 --- a/Code/Utilities/ATLMediaInputStream.m +++ b/Code/Utilities/ATLMediaInputStream.m @@ -21,6 +21,7 @@ #import "ATLMediaInputStream.h" #import #import +@import AVFoundation; #ifdef DEBUG_ATLMediaInputStreamLog #define ATLMediaInputStreamLog(fmt, ...) 
NSLog(fmt, ##__VA_ARGS__) @@ -33,6 +34,8 @@ static char const ATLMediaInputConsumerSerialTransferQueueName[] = "com.layer.Atlas.ATLMediaInputStream.serialTransferQueue"; static char const ATLMediaInputStreamAsyncToBlockingQueueName[] = "com.layer.Atlas.ATLMediaInputStream.blocking"; NSString *const ATLMediaInputStreamAppleCameraTIFFOptionsKey = @"{TIFF}"; +static NSUInteger const ATLMediaInputDefaultFileStreamBuffer = 1024 * 1024; +NSString *const ATLMediaInputStreamTempDirectory = @"com.layer.atlas"; /* Core I/O callbacks */ ALAsset *ATLMediaInputStreamAssetForAssetURL(NSURL *assetURL, ALAssetsLibrary *assetLibrary, NSError **error); @@ -43,6 +46,7 @@ @interface ATLMediaInputStream () /* Private and public properties */ @property (nonatomic, readwrite) NSURL *sourceAssetURL; +@property (nonatomic, readwrite) NSURL *sourceFileURL; @property (nonatomic, readwrite) UIImage *sourceImage; @property (nonatomic, readwrite) NSDictionary *metadata; @property (nonatomic, readwrite) BOOL isLossless; @@ -64,156 +68,80 @@ @interface ATLMediaInputStream () @property (nonatomic) ALAssetsLibrary *assetLibrary; // needs to be alive during transfer @property (nonatomic) ALAsset *asset; @property (nonatomic) ALAssetRepresentation *assetRepresentation; + +@end + +@interface ATLPhotoInputStream : ATLMediaInputStream + +/* References needed by ALAsset, Core Graphics and Image I/O used during transfer */ @property (nonatomic, assign) CGDataProviderRef provider; @property (nonatomic, assign) CGImageSourceRef source; @property (nonatomic, assign) CGDataConsumerRef consumer; @property (nonatomic, assign) CGImageDestinationRef destination; +@property (nonatomic) NSDictionary *sourceImageProperties; @end -@interface ATLAssetInputStream : ATLMediaInputStream +@interface ATLAssetVideoInputStream : ATLMediaInputStream + +@property (nonatomic, strong) AVAssetExportSession *videoAssetExportSession; - (instancetype)initWithAssetURL:(NSURL *)assetURL; @end -@interface ATLImageInputStream : 
ATLMediaInputStream +@interface ATLFileVideoInputStream : ATLAssetVideoInputStream -- (instancetype)initWithImage:(UIImage *)image metadata:(NSDictionary *)metadata;; +- (instancetype)initWithFileURL:(NSURL *)fileURL; @end -@implementation ATLAssetInputStream +@interface ATLPhotoAssetInputStream : ATLPhotoInputStream -- (instancetype)initWithAssetURL:(NSURL *)assetURL -{ - self = [super init]; - if (self) { - if (!assetURL) { - @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` assetURL.", self.class] userInfo:nil]; - } - self.sourceAssetURL = assetURL; - } - return self; -} +- (instancetype)initWithPhotoAssetURL:(NSURL *)assetURL; @end -@implementation ATLImageInputStream +@interface ATLPhotoFileInputStream : ATLPhotoInputStream -- (instancetype)initWithImage:(UIImage *)image metadata:(NSDictionary *)metadata; -{ - self = [super init]; - if (self) { - if (!image) { - @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Cannot initialize %@ with `nil` image.", self.class] userInfo:nil]; - } - self.sourceImage = image; - self.metadata = metadata; - } - return self; -} +- (instancetype)initWithPhotoFileURL:(NSURL *)fileURL; @end -@implementation ATLMediaInputStream - -#pragma mark - Initializers +@interface ATLImageInputStream : ATLPhotoInputStream -- (instancetype)init -{ - self = [super init]; - if (self) { - if ([[self class] isEqual:[ATLMediaInputStream class]]) { - @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Failed to call designated initializer. 
Use one of the following initialiers: %@", [@[ NSStringFromSelector(@selector(mediaInputStreamWithAssetURL:)), NSStringFromSelector(@selector(mediaInputStreamWithImage:metadata:)) ] componentsJoinedByString:@","]] userInfo:nil]; - } - _mediaStreamStatus = NSStreamStatusNotOpen; - _mediaStreamError = nil; - _dataConsumed = [NSData data]; - _numberOfBytesRequested = 0; - _numberOfBytesProvided = 0; - _maximumSize = 0; - _compressionQuality = 0.0f; - _streamFlowRequesterSemaphore = dispatch_semaphore_create(0); - _streamFlowProviderSemaphore = dispatch_semaphore_create(0); - _consumerAsyncQueue = dispatch_queue_create(ATLMediaInputConsumerAsyncQueueName, DISPATCH_QUEUE_CONCURRENT); - _transferBufferSerialGuard = dispatch_queue_create(ATLMediaInputConsumerSerialTransferQueueName, DISPATCH_QUEUE_SERIAL); - } - return self; -} - -+ (instancetype)mediaInputStreamWithAssetURL:(NSURL *)assetURL -{ - return [[ATLAssetInputStream alloc] initWithAssetURL:assetURL]; -} - -+ (instancetype)mediaInputStreamWithImage:(UIImage *)image metadata:(NSDictionary *)metadata; -{ - return [[ATLImageInputStream alloc] initWithImage:image metadata:metadata]; -} - -- (void)dealloc -{ - if (self.streamStatus != NSStreamStatusClosed) { - [self close]; - } -} - -#pragma mark - Transient isLossless implementation - -+ (NSSet *)keyPathsForValuesAffectingValueForKey:(NSString *)key -{ - NSSet *keyPaths = [super keyPathsForValuesAffectingValueForKey:key]; - if ([key isEqualToString:@"isLossless"]) { - NSSet *affectingKey = [NSSet setWithObjects:@"maximumSize", @"compressionQuality", nil]; - keyPaths = [keyPaths setByAddingObjectsFromSet:affectingKey]; - } - return keyPaths; -} - -- (BOOL)isLossless -{ - return (self.maximumSize == 0 && self.compressionQuality == 0.0f); -} - -#pragma mark - Public Overrides +- (instancetype)initWithImage:(UIImage *)image metadata:(NSDictionary *)metadata; -- (NSStreamStatus)streamStatus -{ - return self.mediaStreamStatus; -} +@end -- (NSError *)streamError -{ - return 
self.mediaStreamError; -} +@implementation ATLPhotoInputStream - (void)open { - // Tell receiver we're openning the stream. - self.mediaStreamStatus = NSStreamStatusOpening; - - ATLMediaInputStreamLog(@"opening stream..."); + [super open]; // Setup data provider. - BOOL success; + NSInteger numberOfSourceImages = 0; NSError *error; if (self.sourceAssetURL) { - success = [self setupProviderForAssetStreamingWithError:&error]; + numberOfSourceImages = [self setupProviderForAssetStreamingWithError:&error]; } else if (self.sourceImage) { // UIImages don't need a data provider, we're adding them to CGImageDestination directly. - success = YES; + numberOfSourceImages = 1; + } else if (self.sourceFileURL) { + numberOfSourceImages = [self setupProviderForFileStreamingWithError:&error]; } else { @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Failed setting up data provider because source media not defined." userInfo:nil]; } - if (!success) { + if (numberOfSourceImages == 0) { self.mediaStreamStatus = NSStreamStatusError; self.mediaStreamError = error; return; } // iOS7 specific - if (NSFoundationVersionNumber <= NSFoundationVersionNumber_iOS_7_1) { + BOOL success; + if (&kCGImageDestinationImageMaxPixelSize == NULL) { success = [self setupiOS7SpecificConsumerPrerequisite:&error]; if (!success) { self.mediaStreamStatus = NSStreamStatusError; @@ -223,29 +151,21 @@ - (void)open } // Setup data consumer. - success = [self setupConsumerWithError:&error]; + success = [self setupConsumerWithError:&error numberOfSourceImages:numberOfSourceImages]; if (!success) { self.mediaStreamStatus = NSStreamStatusError; self.mediaStreamError = error; return; } - + // Tell receiver stream is successfully open self.mediaStreamStatus = NSStreamStatusOpen; - return; } - (void)close { - if (self.mediaStreamStatus == NSStreamStatusClosed) { - return; - } + [super close]; - if (self.mediaStreamStatus == NSStreamStatusReading) { - // Close the stream gracefully. 
- self.numberOfBytesRequested = 0; - ATLMediaInputStreamLog(@"closing stream..."); - } // Release Image I/O references if (_destination != NULL) { CFRelease(_destination); @@ -265,55 +185,6 @@ - (void)close } self.asset = nil; self.assetLibrary = nil; - // Signal any ongoing requests. - dispatch_semaphore_signal(self.streamFlowRequesterSemaphore); - self.mediaStreamStatus = NSStreamStatusClosed; -} - -- (NSInteger)read:(uint8_t *)buffer maxLength:(NSUInteger)bytesToConsume -{ - if (self.mediaStreamStatus == NSStreamStatusOpen) { - [self startConsumption]; - } - - // If already completed - if (self.mediaStreamStatus == NSStreamStatusAtEnd) { - return 0; // EOS - } - - // Cannot provide data, if not in reading state. - if (self.mediaStreamStatus != NSStreamStatusReading) { - return -1; // Operation fails - } - - // Setting the data stream request. - ATLMediaInputStreamLog(@"input stream: requesting %lu of bytes", bytesToConsume); - self.numberOfBytesRequested = bytesToConsume; - - // Notify data provider that request is ready. - dispatch_semaphore_signal(self.streamFlowProviderSemaphore); - - // Wait for the response. - ATLMediaInputStreamLog(@"input stream: waiting for cosumer to prepare data"); - dispatch_semaphore_wait(self.streamFlowRequesterSemaphore, DISPATCH_TIME_FOREVER); - - if (self.mediaStreamStatus == NSStreamStatusError) { - return -1; // Operation failed, see self.streamError; - } - - // Copy the consumed image data to `buffer`. - [self.dataConsumed getBytes:buffer]; - ATLMediaInputStreamLog(@"input stream: passed data to receiver"); - - // Clear transfer buffer. 
- NSInteger bytesConsumed = self.dataConsumed.length; - self.dataConsumed = [NSData data]; - return bytesConsumed; -} - -- (BOOL)getBuffer:(uint8_t **)buffer length:(NSUInteger *)len -{ - @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Method %@ on %@ not implemented", NSStringFromSelector(@selector(getBuffer:length:)), self.class] userInfo:nil]; } #pragma mark - Private Methods @@ -348,9 +219,9 @@ - (void)startConsumption /** @abstract Prepares the CGDataProvider which slurps data directly from the ALAsset based on the self.assetURL defined at init. @param error A reference to an `NSError` object that will contain error information in case the action was not successful. - @return Returns `YES` if setup was successful; On failures, method sets the `error` and returns `NO`. + @return Returns the number of images that source will provide if the setup was successful; On failures, method sets the `error` and returns `0`. */ -- (BOOL)setupProviderForAssetStreamingWithError:(NSError **)error +- (NSInteger)setupProviderForAssetStreamingWithError:(NSError **)error { // Creating the asset library that needs to be alive during transfer. self.assetLibrary = [[ALAssetsLibrary alloc] init]; @@ -381,7 +252,7 @@ - (BOOL)setupProviderForAssetStreamingWithError:(NSError **)error if (error) { *error = [NSError errorWithDomain:ATLMediaInputStreamErrorDomain code:ATLMediaInputStreamErrorFailedInitializingAssetProvider userInfo:@{ NSLocalizedDescriptionKey: @"Failed initializing the Quartz image data provider/source pair." }]; } - return NO; + return 0; } // There should be at least one image found in the source. 
@@ -390,9 +261,42 @@ - (BOOL)setupProviderForAssetStreamingWithError:(NSError **)error if (error) { *error = [NSError errorWithDomain:ATLMediaInputStreamErrorDomain code:ATLMediaInputStreamErrorAssetHasNoImages userInfo:@{ NSLocalizedDescriptionKey: @"Failed initializing the Quartz image data provider/source, because source asset doesn't include any images." }]; } - return NO; + return 0; } - return YES; + + // Get source image's properties, because we'll copy it to the destination later. + self.sourceImageProperties = (__bridge NSDictionary *)(CGImageSourceCopyProperties(_source, NULL)); + return count; +} + +/** + @abstract Prepares the CGDataProvider that has the direct access to the file content based on the self.sourceFileURL defined at init. + @param error A reference to an `NSError` object that will contain error information in case the action was not successful. + @return Returns the number of images that source will provide if the setup was successful; On failures, method sets the `error` and returns `0`. + */ +- (NSInteger)setupProviderForFileStreamingWithError:(NSError **)error +{ + _provider = CGDataProviderCreateWithURL((CFURLRef)self.sourceFileURL); + _source = CGImageSourceCreateWithDataProvider(_provider, NULL); + if (self.provider == NULL || self.source == NULL) { + if (error) { + *error = [NSError errorWithDomain:ATLMediaInputStreamErrorDomain code:ATLMediaInputStreamErrorFailedInitializingAssetProvider userInfo:@{ NSLocalizedDescriptionKey: @"Failed initializing the Quartz image data provider/source pair." }]; + } + return 0; + } + + // There should be at least one image found in the source. + size_t count = CGImageSourceGetCount(_source); + if (count <= 0) { + if (error) { + *error = [NSError errorWithDomain:ATLMediaInputStreamErrorDomain code:ATLMediaInputStreamErrorAssetHasNoImages userInfo:@{ NSLocalizedDescriptionKey: @"Failed initializing the Quartz image data provider/source, because source asset doesn't include any images." 
}]; + } + return 0; + } + + // Get source image's properties, because we'll copy it to the destination later. + self.sourceImageProperties = (__bridge NSDictionary *)(CGImageSourceCopyProperties(_source, NULL)); + return count; } /** @@ -403,7 +307,7 @@ - (BOOL)setupProviderForAssetStreamingWithError:(NSError **)error */ - (BOOL)setupiOS7SpecificConsumerPrerequisite:(NSError **)error { - if (self.maximumSize > 0 && NSFoundationVersionNumber <= NSFoundationVersionNumber_iOS_7_1) { + if (self.maximumSize > 0 && &kCGImageDestinationImageMaxPixelSize == NULL) { CFDataRef cfDataPNGRepresentation; if (!self.sourceAssetURL && self.sourceImage) { // In case the we need to resample an UIImage (which might be @@ -436,7 +340,7 @@ - (BOOL)setupiOS7SpecificConsumerPrerequisite:(NSError **)error @param error A reference to an `NSError` object that will contain error information in case the action was not successful. @return Returns `YES` if setup was successful; On failures, method sets the `error` and returns `NO`. */ -- (BOOL)setupConsumerWithError:(NSError **)error +- (BOOL)setupConsumerWithError:(NSError **)error numberOfSourceImages:(NSInteger)numberOfSourceImages { // Setting up destination-writer (consumer). CGDataConsumerCallbacks dataConsumerCallbacks = { @@ -446,12 +350,17 @@ - (BOOL)setupConsumerWithError:(NSError **)error _consumer = CGDataConsumerCreate((void *)CFBridgingRetain(self), &dataConsumerCallbacks); if (self.assetRepresentation) { // In case source is the ALAsset. - _destination = CGImageDestinationCreateWithDataConsumer(_consumer, (CFStringRef)self.assetRepresentation.UTI, 1, NULL); + _destination = CGImageDestinationCreateWithDataConsumer(_consumer, (CFStringRef)self.assetRepresentation.UTI, numberOfSourceImages, NULL); + } else if (self.sourceFileURL) { + // In case source if a file. 
+ CFStringRef fileExtension = (__bridge CFStringRef)[self.sourceFileURL pathExtension]; + CFStringRef fileUTI = UTTypeCreatePreferredIdentifierForTag(kUTTagClassFilenameExtension, fileExtension, NULL); + _destination = CGImageDestinationCreateWithDataConsumer(_consumer, fileUTI, numberOfSourceImages, NULL); } else { // In case source is the UIImage. _destination = CGImageDestinationCreateWithDataConsumer(_consumer, kUTTypeJPEG, 1, NULL); } - + if (_consumer == NULL || _destination == NULL) { if (error) { *error = [NSError errorWithDomain:ATLMediaInputStreamErrorDomain code:ATLMediaInputStreamErrorFailedInitializingImageIOConsumer userInfo:nil]; @@ -462,7 +371,7 @@ - (BOOL)setupConsumerWithError:(NSError **)error NSMutableDictionary *destinationOptions = self.metadata ? [self.metadata mutableCopy] : [NSMutableDictionary dictionary]; if (self.maximumSize > 0) { // Resample image if requested. - if (NSFoundationVersionNumber > NSFoundationVersionNumber_iOS_7_1) { + if (&kCGImageDestinationImageMaxPixelSize != NULL) { // Unfortunately, this feature is only available on iOS8+. If we're // on <= iOS7.1, image had to be resampled beforehand (see setupiOS7SpecificConsumerPrerequisite:). [destinationOptions setObject:@(self.maximumSize) forKey:(NSString *)kCGImageDestinationImageMaxPixelSize]; @@ -472,21 +381,434 @@ - (BOOL)setupConsumerWithError:(NSError **)error // If image should only be compressed. 
[destinationOptions setObject:@(self.compressionQuality) forKey:(NSString *)kCGImageDestinationLossyCompressionQuality]; } - if (self.metadata && self.metadata[ATLMediaInputStreamAppleCameraTIFFOptionsKey]) { + if (self.metadata && self.metadata[ATLMediaInputStreamAppleCameraTIFFOptionsKey] && self.metadata[(NSString *)kCGImagePropertyOrientation]) { NSMutableDictionary *mutableTiffDict = [self.metadata[ATLMediaInputStreamAppleCameraTIFFOptionsKey] mutableCopy]; [mutableTiffDict setObject:self.metadata[(NSString *)kCGImagePropertyOrientation] forKey:(NSString *)kCGImagePropertyTIFFOrientation]; [destinationOptions setObject:mutableTiffDict forKey:ATLMediaInputStreamAppleCameraTIFFOptionsKey]; } - if (self.assetRepresentation) { - CGImageDestinationAddImageFromSource(_destination, self.source, 0, (__bridge CFDictionaryRef)destinationOptions); - } else { + if (self.assetRepresentation || self.sourceFileURL) { + for (NSInteger idx=0; idx= 0.8f || self.compressionQuality == 0.0f) { + encoderPresetName = AVAssetExportPresetHighestQuality; + } else if (self.compressionQuality < 0.8f && self.compressionQuality >= 0.5f) { + encoderPresetName = AVAssetExportPresetMediumQuality; + } else if (self.compressionQuality < 0.5f) { + encoderPresetName = AVAssetExportPresetLowQuality; + } + + // Check if it's compatible with this device. + NSArray *availablePressets = [AVAssetExportSession exportPresetsCompatibleWithAsset:videoAVAsset]; + if (![availablePressets containsObject:encoderPresetName]) { + // Bah, it's not. Fall back to whatever's the first preset. + encoderPresetName = availablePressets.firstObject; + if (!encoderPresetName) { + self.mediaStreamError = [NSError errorWithDomain:ATLMediaInputStreamErrorDomain code:ATLMediaInputStreamErrorNoVideoExportPresetsAvailable userInfo:@{ NSLocalizedDescriptionKey: @"Could not find any export presets for the host device." 
}]; + self.mediaStreamStatus = NSStreamEventErrorOccurred; + return; + } + } + + // Prepare the temporary file URL (it should be a member property). + NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES); + NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil; + NSURL *baseURL = [NSURL fileURLWithPath:basePath isDirectory:YES]; + NSURL *outputDirURL = [NSURL URLWithString:ATLMediaInputStreamTempDirectory relativeToURL:baseURL]; + NSURL *outputURL = [NSURL URLWithString:[NSString stringWithFormat:@"exported-video-%@.mp4", [[NSUUID UUID] UUIDString]] relativeToURL:outputDirURL]; + [[NSFileManager defaultManager] createDirectoryAtURL:outputDirURL withIntermediateDirectories:YES attributes:nil error:nil]; + [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil]; + + // Prepare the AVExportSession (use the temp file url). + self.videoAssetExportSession = [[AVAssetExportSession alloc] initWithAsset:videoAVAsset presetName:encoderPresetName]; + self.videoAssetExportSession.outputURL = outputURL.absoluteURL; + self.videoAssetExportSession.outputFileType = AVFileTypeMPEG4; + self.videoAssetExportSession.shouldOptimizeForNetworkUse = YES; + + // Success + self.mediaStreamStatus = NSStreamStatusOpen; +} + +- (void)close +{ + [super close]; + // Delete the temporary file at path where the video was exported to. + if (self.videoAssetExportSession.outputURL) { + [[NSFileManager defaultManager] removeItemAtURL:self.videoAssetExportSession.outputURL error:nil]; + } + // Nil out export session and do other cleanups. 
+ self.videoAssetExportSession = nil; + self.dataConsumed = nil; + self.mediaStreamStatus = NSStreamStatusClosed; +} + +- (void)startConsumption +{ + self.mediaStreamStatus = NSStreamStatusReading; + [self.videoAssetExportSession exportAsynchronouslyWithCompletionHandler:^{ + switch (self.videoAssetExportSession.status) { + case AVAssetExportSessionStatusFailed: { + ATLMediaInputStreamLog(@"consumer: failed exporting the video with error=%@", self.exportSession.error); + self.mediaStreamError = self.videoAssetExportSession.error; + self.mediaStreamStatus = NSStreamStatusError; + dispatch_semaphore_signal(self.streamFlowRequesterSemaphore); + break; + } + case AVAssetExportSessionStatusCompleted: { + ATLMediaInputStreamLog(@"consumer: export completed"); + [self consumeData]; + break; + } + default: { + self.mediaStreamError = [NSError errorWithDomain:ATLMediaInputStreamErrorDomain code:ATLMediaInputStreamErrorVideoExportFailed userInfo:@{ NSLocalizedDescriptionKey: @"Could not export the video.", @"exporterror": self.videoAssetExportSession.error ?: [NSNull null], @"exportstatus": @(self.videoAssetExportSession.status) }]; + self.mediaStreamStatus = NSStreamStatusError; + ATLMediaInputStreamLog(@"consumer: failed exporting the video with error=%@", self.mediaStreamError); + dispatch_semaphore_signal(self.streamFlowRequesterSemaphore); + break; + } + } + }]; +} + +- (void)consumeData +{ + NSInputStream *exportedVideoFileInputStream = [[NSInputStream alloc]initWithURL:self.videoAssetExportSession.outputURL]; + NSMutableData *dataFromStream = [NSMutableData data]; + uint8_t *buffer = malloc(ATLMediaInputDefaultFileStreamBuffer); + NSInteger bytesRead; + + // Open the expoted video file input stream. 
+ [exportedVideoFileInputStream open]; + + if (exportedVideoFileInputStream.streamStatus != NSStreamStatusOpen) { + self.mediaStreamError = exportedVideoFileInputStream.streamError; + self.mediaStreamStatus = exportedVideoFileInputStream.streamStatus; + dispatch_semaphore_signal(self.streamFlowRequesterSemaphore); + return; + } + + do { + bytesRead = [exportedVideoFileInputStream read:buffer maxLength:MIN(ATLMediaInputDefaultFileStreamBuffer, self.numberOfBytesRequested)]; + if (bytesRead != 0) { + [dataFromStream appendBytes:buffer length:self.numberOfBytesRequested]; + } else if (bytesRead < 0) { + self.mediaStreamStatus = exportedVideoFileInputStream.streamStatus; + self.mediaStreamError = exportedVideoFileInputStream.streamError; + ATLMediaInputStreamLog(@"consumer: failed streaming the exported video file with error=%@", exportedVideoFileInputStream.streamError); + break; + } + // Consumption continues, after flow control logic in readBytes:len: signals it. + dispatch_sync(self.transferBufferSerialGuard, ^{ + ATLMediaInputStreamLog(@"consumer: waiting for request from stream (have %lu bytes ready)", bytesRead); + dispatch_semaphore_wait(self.streamFlowProviderSemaphore, DISPATCH_TIME_FOREVER); + }); + + // Copy buffer into NSData that was consumed by the file input stream. + NSUInteger bytesConsumed = MIN(self.numberOfBytesRequested, bytesRead); + NSData *dataConsumed = [NSData dataWithBytes:buffer length:bytesConsumed]; + self.dataConsumed = dataConsumed; + ATLMediaInputStreamLog(@"consumer: consumed %lu bytes (requested %lu bytes, provided %lu bytes)", (unsigned long)dataConsumed.length, (unsigned long)assetStream.numberOfBytesRequested, length); + + // Signal the requester data is ready for consumption. 
+ dispatch_semaphore_signal(self.streamFlowRequesterSemaphore); + ATLMediaInputStreamLog(@"return %lu", (unsigned long)bytesConsumed); + } while (bytesRead != 0); + [exportedVideoFileInputStream close]; + + free(buffer); + + if (bytesRead == 0) { + self.mediaStreamStatus = NSStreamStatusAtEnd; + } + dispatch_semaphore_signal(self.streamFlowRequesterSemaphore); +} + +@end + +@implementation ATLMediaInputStream + +#pragma mark - Public Factories + ++ (instancetype)mediaInputStreamWithAssetURL:(NSURL *)assetURL +{ + ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init]; + ALAsset *asset = ATLMediaInputStreamAssetForAssetURL(assetURL, assetsLibrary, nil); + if (!asset) { + return nil; + } + if ([[asset valueForProperty:ALAssetPropertyType] isEqualToString:ALAssetTypeVideo]) { + return [[ATLAssetVideoInputStream alloc] initWithAssetURL:assetURL]; + } else if ([[asset valueForProperty:ALAssetPropertyType] isEqualToString:ALAssetTypePhoto]) { + return [[ATLPhotoAssetInputStream alloc] initWithPhotoAssetURL:assetURL]; + } else { + return nil; + } +} + ++ (instancetype)mediaInputStreamWithImage:(UIImage *)image metadata:(NSDictionary *)metadata; +{ + return [[ATLImageInputStream alloc] initWithImage:image metadata:metadata]; +} + ++ (instancetype)mediaInputStreamWithFileURL:(NSURL *)fileURL +{ + if (!fileURL) { + return nil; + } + CFStringRef fileExtension = (__bridge CFStringRef)[fileURL pathExtension]; + CFStringRef fileUTI = UTTypeCreatePreferredIdentifierForTag(kUTTagClassFilenameExtension, fileExtension, NULL); + if (UTTypeConformsTo(fileUTI, kUTTypeImage)) { + return [[ATLPhotoFileInputStream alloc] initWithPhotoFileURL:fileURL]; + } else if (UTTypeConformsTo(fileUTI, kUTTypeMovie)) { + return [[ATLFileVideoInputStream alloc] initWithFileURL:fileURL]; + } else { + NSLog(@"Failed to initialize an input stream for an unkown type: '%@'", (__bridge NSString *)UTTypeCopyDescription(fileUTI)); + return nil; + } +} + +#pragma mark - Initializers + +- 
(instancetype)init +{ + self = [super init]; + if (self) { + if ([[self class] isEqual:[ATLMediaInputStream class]]) { + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Failed to call designated initializer. Use one of the following initialiers: %@", [@[ NSStringFromSelector(@selector(mediaInputStreamWithAssetURL:)), NSStringFromSelector(@selector(mediaInputStreamWithImage:metadata:)) ] componentsJoinedByString:@", "]] userInfo:nil]; + } + _mediaStreamStatus = NSStreamStatusNotOpen; + _mediaStreamError = nil; + _dataConsumed = [NSData data]; + _numberOfBytesRequested = 0; + _numberOfBytesProvided = 0; + _maximumSize = 0; + _compressionQuality = 0.0f; + _streamFlowRequesterSemaphore = dispatch_semaphore_create(0); + _streamFlowProviderSemaphore = dispatch_semaphore_create(0); + _consumerAsyncQueue = dispatch_queue_create(ATLMediaInputConsumerAsyncQueueName, DISPATCH_QUEUE_CONCURRENT); + _transferBufferSerialGuard = dispatch_queue_create(ATLMediaInputConsumerSerialTransferQueueName, DISPATCH_QUEUE_SERIAL); + } + return self; +} + +- (void)dealloc +{ + if (self.streamStatus != NSStreamStatusClosed) { + [self close]; + } +} + +#pragma mark - Transient isLossless implementation + ++ (NSSet *)keyPathsForValuesAffectingValueForKey:(NSString *)key +{ + NSSet *keyPaths = [super keyPathsForValuesAffectingValueForKey:key]; + if ([key isEqualToString:@"isLossless"]) { + NSSet *affectingKey = [NSSet setWithObjects:@"maximumSize", @"compressionQuality", nil]; + keyPaths = [keyPaths setByAddingObjectsFromSet:affectingKey]; + } + return keyPaths; +} + +- (BOOL)isLossless +{ + return (self.maximumSize == 0 && self.compressionQuality == 0.0f); +} + +#pragma mark - Public Overrides + +- (NSStreamStatus)streamStatus +{ + return self.mediaStreamStatus; +} + +- (NSError *)streamError +{ + return self.mediaStreamError; +} + +- (void)open +{ + // Tell receiver we're openning the stream. 
+ self.mediaStreamStatus = NSStreamStatusOpening; + + ATLMediaInputStreamLog(@"opening stream..."); + return; +} + +- (void)close +{ + if (self.mediaStreamStatus == NSStreamStatusClosed) { + return; + } + + if (self.mediaStreamStatus == NSStreamStatusReading) { + // Close the stream gracefully. + self.numberOfBytesRequested = 0; + ATLMediaInputStreamLog(@"closing stream..."); + } + // Signal any ongoing requests. + dispatch_semaphore_signal(self.streamFlowRequesterSemaphore); + self.mediaStreamStatus = NSStreamStatusClosed; +} + +- (NSInteger)read:(uint8_t *)buffer maxLength:(NSUInteger)bytesToConsume +{ + if (self.mediaStreamStatus == NSStreamStatusOpen) { + [self startConsumption]; + } + + // If already completed + if (self.mediaStreamStatus == NSStreamStatusAtEnd) { + return 0; // EOS + } + + // Cannot provide data, if not in reading state. + if (self.mediaStreamStatus != NSStreamStatusReading) { + return -1; // Operation fails + } + + // Setting the data stream request. + ATLMediaInputStreamLog(@"input stream: requesting %lu of bytes", bytesToConsume); + self.numberOfBytesRequested = bytesToConsume; + + // Notify data provider that request is ready. + dispatch_semaphore_signal(self.streamFlowProviderSemaphore); + + // Wait for the response. + ATLMediaInputStreamLog(@"input stream: waiting for cosumer to prepare data"); + dispatch_semaphore_wait(self.streamFlowRequesterSemaphore, DISPATCH_TIME_FOREVER); + + if (self.mediaStreamStatus == NSStreamStatusError) { + return -1; // Operation failed, see self.streamError; + } + + // Copy the consumed data to `buffer`. + [self.dataConsumed getBytes:buffer length:bytesToConsume]; + ATLMediaInputStreamLog(@"input stream: passed data to receiver"); + + // Clear transfer buffer. + NSInteger bytesConsumed = self.dataConsumed.length; + self.dataConsumed = [NSData data]; + return bytesConsumed; +} + +/** + @abstract Override this method to start providing data to the data consumer. 
+ @note Do not call `[super close];` in your subclassed implementations. + */ +- (void)startConsumption +{ + [self close]; +} + +- (BOOL)getBuffer:(uint8_t **)buffer length:(NSUInteger *)len +{ + @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:[NSString stringWithFormat:@"Method %@ on %@ not implemented", NSStringFromSelector(@selector(getBuffer:length:)), self.class] userInfo:nil]; +} + +@end + #pragma mark - Image I/O Callback Implementation ALAsset *ATLMediaInputStreamAssetForAssetURL(NSURL *assetURL, ALAssetsLibrary *assetLibrary, NSError **error) @@ -500,10 +822,28 @@ - (BOOL)setupConsumerWithError:(NSError **)error __block ALAsset *resultAsset; dispatch_async(asyncQueue, ^{ [assetLibrary assetForURL:assetURL resultBlock:^(ALAsset *asset) { - resultAsset = asset; - dispatch_semaphore_signal(semaphore); + if (asset){ + resultAsset = asset; + dispatch_semaphore_signal(semaphore); + } else { + // On iOS 8.1 [library assetForUrl] Photo Streams always returns nil. 
Try to obtain it in an alternative way + [assetLibrary enumerateGroupsWithTypes:ALAssetsGroupPhotoStream usingBlock:^(ALAssetsGroup *group, BOOL *stop) { + [group enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *result, NSUInteger index, BOOL *stop) { + if([result.defaultRepresentation.url isEqual:assetURL]) { + resultAsset = result; + *stop = YES; + dispatch_semaphore_signal(semaphore); + } + }]; + } failureBlock:^(NSError *libraryError) { + if (libraryError) { + *error = libraryError; + } + dispatch_semaphore_signal(semaphore); + }]; + } } failureBlock:^(NSError *libraryError) { - if (error) { + if (libraryError) { *error = libraryError; } dispatch_semaphore_signal(semaphore); diff --git a/Code/Utilities/ATLMessagingUtilities.h b/Code/Utilities/ATLMessagingUtilities.h index dbb9985d2..05b9fda73 100644 --- a/Code/Utilities/ATLMessagingUtilities.h +++ b/Code/Utilities/ATLMessagingUtilities.h @@ -28,17 +28,28 @@ extern NSString *const ATLMIMETypeTextPlain; // text/plain extern NSString *const ATLMIMETypeImagePNG; // image/png extern NSString *const ATLMIMETypeImageJPEG; // image/jpeg extern NSString *const ATLMIMETypeImageJPEGPreview; // image/jpeg+preview +extern NSString *const ATLMIMETypeImageGIF; // image/gif +extern NSString *const ATLMIMETypeImageGIFPreview; // image/gif+preview extern NSString *const ATLMIMETypeImageSize; // application/json+imageSize +extern NSString *const ATLMIMETypeVideoQuickTime; // video/quicktime extern NSString *const ATLMIMETypeLocation; // location/coordinate extern NSString *const ATLMIMETypeDate; // text/date - +extern NSString *const ATLMIMETypeVideoMP4; // video/mp4 extern NSUInteger const ATLDefaultThumbnailSize; // 512px +extern NSUInteger const ATLDefaultGIFThumbnailSize; // 64px +extern NSString *const ATLPasteboardImageKey; extern NSString *const ATLImagePreviewWidthKey; extern NSString *const ATLImagePreviewHeightKey; extern NSString *const ATLLocationLatitudeKey; extern NSString *const 
ATLLocationLongitudeKey; +//--------------------------------- +// @name Internationalization Macro +//--------------------------------- + +#define ATLLocalizedString(key, value, comment) NSLocalizedStringWithDefaultValue(key, nil, [NSBundle mainBundle], value, comment) + //-------------------------- // @name Max Cell Dimensions //-------------------------- @@ -68,6 +79,8 @@ CGSize ATLTextPlainSize(NSString *string, UIFont *font); CGRect ATLImageRectConstrainedToSize(CGSize imageSize, CGSize maxSize); +CGFloat ATLDegreeToRadians(CGFloat degrees); + //----------------------------- // @name Message Part Utilities //----------------------------- @@ -86,4 +99,4 @@ void ATLLastPhotoTaken(void(^completionHandler)(UIImage *image, NSError *error)) UIImage *ATLPinPhotoForSnapshot(MKMapSnapshot *snapshot, CLLocationCoordinate2D location); -NSArray *ATLLinkResultsForText(NSString *text); +NSArray *ATLTextCheckingResultsForText(NSString *text, NSTextCheckingType linkTypes); diff --git a/Code/Utilities/ATLMessagingUtilities.m b/Code/Utilities/ATLMessagingUtilities.m index 87e32369c..126571a18 100644 --- a/Code/Utilities/ATLMessagingUtilities.m +++ b/Code/Utilities/ATLMessagingUtilities.m @@ -21,18 +21,24 @@ #import "ATLMessagingUtilities.h" #import "ATLErrors.h" #import +#import "ATLMessageCollectionViewCell.h" NSString *const ATLMIMETypeTextPlain = @"text/plain"; NSString *const ATLMIMETypeTextHTML = @"text/HTML"; NSString *const ATLMIMETypeImagePNG = @"image/png"; +NSString *const ATLMIMETypeImageGIF = @"image/gif"; +NSString *const ATLMIMETypeVideoQuickTime = @"video/quicktime"; NSString *const ATLMIMETypeImageSize = @"application/json+imageSize"; NSString *const ATLMIMETypeImageJPEG = @"image/jpeg"; NSString *const ATLMIMETypeImageJPEGPreview = @"image/jpeg+preview"; +NSString *const ATLMIMETypeImageGIFPreview = @"image/gif+preview"; NSString *const ATLMIMETypeLocation = @"location/coordinate"; NSString *const ATLMIMETypeDate = @"text/date"; - +NSString *const 
ATLMIMETypeVideoMP4 = @"video/mp4"; NSUInteger const ATLDefaultThumbnailSize = 512; +NSUInteger const ATLDefaultGIFThumbnailSize = 64; +NSString *const ATLPasteboardImageKey = @"image"; NSString *const ATLImagePreviewWidthKey = @"width"; NSString *const ATLImagePreviewHeightKey = @"height"; NSString *const ATLLocationLatitudeKey = @"lat"; @@ -54,6 +60,7 @@ CGFloat ATLMaxCellHeight() CGSize ATLSizeProportionallyConstrainedToSize(CGSize nativeSize, CGSize maxSize) { + if (nativeSize.width < maxSize.width && nativeSize.height < maxSize.height) return nativeSize; CGSize itemSize; CGFloat widthScale = maxSize.width / nativeSize.width; CGFloat heightScale = maxSize.height / nativeSize.height; @@ -125,6 +132,11 @@ CGRect ATLImageRectConstrainedToSize(CGSize imageSize, CGSize maxSize) return thumbRect; } +CGFloat ATLDegreeToRadians(CGFloat degrees) +{ + return ((M_PI * degrees)/ 180); +} + #pragma mark - Private Message Part Helpers CGSize ATLSizeFromOriginalSizeWithConstraint(CGSize originalSize, CGFloat constraint) @@ -184,7 +196,7 @@ void ATLAssetURLOfLastPhotoTaken(void(^completionHandler)(NSURL *assetURL, NSErr if (!group) return; // Within the group enumeration block, filter to enumerate just photos. 
- [group setAssetsFilter:[ALAssetsFilter allPhotos]]; + [group setAssetsFilter:[ALAssetsFilter allAssets]]; if ([group numberOfAssets] == 0) { completionHandler(nil, [NSError errorWithDomain:ATLErrorDomain code:ATLErrorNoPhotos userInfo:@{NSLocalizedDescriptionKey: @"There are no photos."}]); @@ -262,12 +274,12 @@ void ATLLastPhotoTaken(void(^completionHandler)(UIImage *image, NSError *error)) return finalImage; } -NSArray *ATLLinkResultsForText(NSString *text) +NSArray *ATLTextCheckingResultsForText(NSString *text, NSTextCheckingType linkTypes) { if (!text) return nil; NSError *error; - NSDataDetector *detector = [NSDataDetector dataDetectorWithTypes:NSTextCheckingTypeLink + NSDataDetector *detector = [NSDataDetector dataDetectorWithTypes:linkTypes error:&error]; if (error) return nil; return [detector matchesInString:text options:kNilOptions range:NSMakeRange(0, text.length)]; diff --git a/Code/Utilities/ATLUIImageHelper.h b/Code/Utilities/ATLUIImageHelper.h new file mode 100644 index 000000000..e0698857e --- /dev/null +++ b/Code/Utilities/ATLUIImageHelper.h @@ -0,0 +1,35 @@ +// +// ATLUIImageHelper.h +// Pods +// +// Created by Kabir Mahal on 3/18/15. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +#import +#import + +/** + @abstract Processes GIFs by finding frame count and duration and returns an auto-looping GIF + @param data The NSData instance that should be returned as a looping GIF + @return Returns a UIImage instance that has a looping GIF. Can be used with any UIImageView + */ +UIImage *ATLAnimatedImageWithAnimatedGIFData(NSData *data); + +/** + @abstract Processes GIFs by finding frame count and duration and returns an auto-looping GIF + @param url The NSURL instance that should be returned as a looping GIF + @return Returns a UIImage instance that has a looping GIF. Can be used with any UIImageView + */ + UIImage *ATLAnimatedImageWithAnimatedGIFURL(NSURL *url); \ No newline at end of file diff --git a/Code/Utilities/ATLUIImageHelper.m b/Code/Utilities/ATLUIImageHelper.m new file mode 100644 index 000000000..0d906054e --- /dev/null +++ b/Code/Utilities/ATLUIImageHelper.m @@ -0,0 +1,141 @@ +// +// ATLUIImageHelper.m +// Pods +// +// Created by Kabir Mahal on 3/18/15. 
+// +// Credit and source to: https://github.com/mayoff/uiimage-from-animated-gif + + +#import "ATLUIImageHelper.h" +#import + +#if __has_feature(objc_arc) +#define toCF (__bridge CFTypeRef) +#define fromCF (__bridge id) +#else +#define toCF (CFTypeRef) +#define fromCF (id) +#endif + +#pragma mark - Private Methods + +static int ATLDelayCentisecondsForImageAtIndex(CGImageSourceRef const source, size_t const index) +{ + int delayCentiseconds = 1; + CFDictionaryRef const properties = CGImageSourceCopyPropertiesAtIndex(source, index, NULL); + if (properties) { + CFDictionaryRef const gifProperties = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary); + if (gifProperties) { + NSNumber *gifFrameDuration = fromCF CFDictionaryGetValue(gifProperties, kCGImagePropertyGIFUnclampedDelayTime); + if (gifFrameDuration == NULL || [gifFrameDuration doubleValue] == 0) { + gifFrameDuration = fromCF CFDictionaryGetValue(gifProperties, kCGImagePropertyGIFDelayTime); + } + if ([gifFrameDuration doubleValue] > 0) { + // Even though the GIF stores the delay as an integer number of centiseconds, ImageIO “helpfully” converts that to seconds for us. 
+ delayCentiseconds = (int)lrint([gifFrameDuration doubleValue] * 100); + } + } + CFRelease(properties); + } + return delayCentiseconds; +} + +static void ATLCreateImagesAndDelays(CGImageSourceRef source, size_t count, CGImageRef imagesOut[count], int delayCentisecondsOut[count]) +{ + for (size_t i = 0; i < count; ++i) { + imagesOut[i] = CGImageSourceCreateImageAtIndex(source, i, NULL); + delayCentisecondsOut[i] = ATLDelayCentisecondsForImageAtIndex(source, i); + } +} + +static int ATLSum(size_t const count, int const *const values) +{ + int theSum = 0; + for (size_t i = 0; i < count; ++i) { + theSum += values[i]; + } + return theSum; +} + +static int ATLPairGCD(int duration, int gcd) +{ + if (duration < gcd) { + return ATLPairGCD(gcd, duration); + } + while (true) { + int const r = duration % gcd; + if (r == 0) { + return gcd; + } + duration = gcd; + gcd = r; + } +} + +static int ATLVectorGCD(size_t const count, int const *const values) +{ + int gcd = values[0]; + for (size_t i = 1; i < count; ++i) { + // Note that after I process the first few elements of the vector, `gcd` will probably be smaller than any remaining element. By passing the smaller value as the second argument to `pairGCD`, I avoid making it swap the arguments. 
+ gcd = ATLPairGCD(values[i], gcd); + } + return gcd; +} + +static NSArray *ATLFrameArray(size_t const count, CGImageRef const images[count], int const delayCentiseconds[count], int const totalDurationCentiseconds) +{ + int const gcd = ATLVectorGCD(count, delayCentiseconds); + size_t const frameCount = totalDurationCentiseconds / gcd; + UIImage *frames[frameCount]; + for (size_t i = 0, f = 0; i < count; ++i) { + UIImage *const frame = [UIImage imageWithCGImage:images[i]]; + for (size_t j = delayCentiseconds[i] / gcd; j > 0; --j) { + frames[f++] = frame; + } + } + return [NSArray arrayWithObjects:frames count:frameCount]; +} + +static void ATLReleaseImages(size_t const count, CGImageRef const images[count]) +{ + for (size_t i = 0; i < count; ++i) { + CGImageRelease(images[i]); + } +} + +static UIImage *ATLAnimatedImageWithAnimatedGIFImageSource(CGImageSourceRef const source) +{ + size_t const count = CGImageSourceGetCount(source); + CGImageRef images[count]; + int delayCentiseconds[count]; // in centiseconds + ATLCreateImagesAndDelays(source, count, images, delayCentiseconds); + int const totalDurationCentiseconds = ATLSum(count, delayCentiseconds); + NSArray *const frames = ATLFrameArray(count, images, delayCentiseconds, totalDurationCentiseconds); + UIImage *const animation = [UIImage animatedImageWithImages:frames duration:(NSTimeInterval)totalDurationCentiseconds / 100.0]; + ATLReleaseImages(count, images); + return animation; +} + +static UIImage *ATLAnimatedImageWithAnimatedGIFReleasingImageSource(CGImageSourceRef CF_RELEASES_ARGUMENT source) +{ + if (source) { + UIImage *const image = ATLAnimatedImageWithAnimatedGIFImageSource(source); + CFRelease(source); + return image; + } else { + return nil; + } +} + +#pragma mark - Public Helper Methods + +UIImage *ATLAnimatedImageWithAnimatedGIFData(NSData *data) +{ + return ATLAnimatedImageWithAnimatedGIFReleasingImageSource(CGImageSourceCreateWithData(toCF data, NULL)); +} + +UIImage 
*ATLAnimatedImageWithAnimatedGIFURL(NSURL *url) +{ + return ATLAnimatedImageWithAnimatedGIFReleasingImageSource(CGImageSourceCreateWithURL(toCF url, NULL)); +} diff --git a/Code/Views/ATLAddressBarTextView.m b/Code/Views/ATLAddressBarTextView.m index 477fb8ed7..eebbad5c9 100644 --- a/Code/Views/ATLAddressBarTextView.m +++ b/Code/Views/ATLAddressBarTextView.m @@ -77,7 +77,7 @@ - (void)lyr_commonInit self.toLabel = [UILabel new]; self.toLabel.translatesAutoresizingMaskIntoConstraints = NO; - self.toLabel.text = @"To:"; + self.toLabel.text = ATLLocalizedString(@"atl.addressbar.textview.tolabel.key", @"To:", nil); self.toLabel.textColor = [UIColor grayColor]; self.toLabel.font = self.addressBarFont; [self addSubview:self.toLabel]; diff --git a/Code/Views/ATLAvatarImageView.h b/Code/Views/ATLAvatarImageView.h index d1d07e121..d5626dc2c 100644 --- a/Code/Views/ATLAvatarImageView.h +++ b/Code/Views/ATLAvatarImageView.h @@ -57,4 +57,9 @@ extern CGFloat const ATLAvatarImageDiameter; */ @property (nonatomic) UIColor *imageViewBackgroundColor UI_APPEARANCE_SELECTOR; +/** + @abstract Sets the avatar item, image view, and initial view to nil in preparation for reuse. 
+ */ +- (void)resetView; + @end diff --git a/Code/Views/ATLAvatarImageView.m b/Code/Views/ATLAvatarImageView.m index 65c325450..6d41b0b0b 100644 --- a/Code/Views/ATLAvatarImageView.m +++ b/Code/Views/ATLAvatarImageView.m @@ -23,6 +23,7 @@ @interface ATLAvatarImageView () @property (nonatomic) UILabel *initialsLabel; +@property (nonatomic) NSURLSessionDownloadTask *downloadTask; @end @@ -30,6 +31,17 @@ @implementation ATLAvatarImageView NSString *const ATLAvatarImageViewAccessibilityLabel = @"ATLAvatarImageViewAccessibilityLabel"; + ++ (NSCache *)sharedImageCache +{ + static NSCache *_sharedImageCache; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + _sharedImageCache = [NSCache new]; + }); + return _sharedImageCache; +} + + (void)initialize { ATLAvatarImageView *proxy = [self appearance]; @@ -82,11 +94,29 @@ - (CGSize)intrinsicContentSize return CGSizeMake(self.avatarImageViewDiameter, self.avatarImageViewDiameter); } +- (void)resetView +{ + self.avatarItem = nil; + self.image = nil; + self.initialsLabel.text = nil; + [self.downloadTask cancel]; +} + +- (void)dealloc +{ + [self.downloadTask cancel]; +} + - (void)setAvatarItem:(id)avatarItem { - if (avatarItem.avatarImage) { + if ([avatarItem avatarImageURL]) { + self.initialsLabel.text = nil; + [self loadAvatarImageWithURL:[avatarItem avatarImageURL]]; + } else if (avatarItem.avatarImage) { + self.initialsLabel.text = nil; self.image = avatarItem.avatarImage; } else if (avatarItem.avatarInitials) { + self.image = nil; self.initialsLabel.text = avatarItem.avatarInitials; } _avatarItem = avatarItem; @@ -116,6 +146,53 @@ - (void)setImageViewBackgroundColor:(UIColor *)imageViewBackgroundColor self.backgroundColor = imageViewBackgroundColor; _imageViewBackgroundColor = imageViewBackgroundColor; } + +- (void)loadAvatarImageWithURL:(NSURL *)imageURL +{ + if (![imageURL isKindOfClass:[NSURL class]] || imageURL.absoluteString.length == 0) { + NSLog(@"Cannot fetch image without URL"); + return; + } + + // 
Check if image is in cache + __block NSString *stringURL = imageURL.absoluteString; + UIImage *image = [[[self class] sharedImageCache] objectForKey:stringURL]; + if (image) { + self.image = image; + return; + } + + // If not, fetch the image and add to the cache + [self fetchImageFromRemoteImageURL:imageURL]; +} + +- (void)fetchImageFromRemoteImageURL:(NSURL *)remoteImageURL +{ + self.downloadTask = [[NSURLSession sharedSession] downloadTaskWithURL:remoteImageURL completionHandler:^(NSURL *location, NSURLResponse *response, NSError *error) { + if (!error && location) { + __block UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfURL:location]]; + if (image) { + [[[self class] sharedImageCache] setObject:image forKey:remoteImageURL.absoluteString cost:0]; + dispatch_async(dispatch_get_main_queue(), ^{ + [self updateWithImage:image forRemoteImageURL:remoteImageURL]; + }); + } + } + }]; + [self.downloadTask resume]; +} + +- (void)updateWithImage:(UIImage *)image forRemoteImageURL:(NSURL *)remoteImageURL; +{ + [UIView animateWithDuration:0.2 animations:^{ + self.alpha = 0.0; + } completion:^(BOOL finished) { + [UIView animateWithDuration:0.5 animations:^{ + self.image = image; + self.alpha = 1.0; + }]; + }]; +} - (void)configureInitialsLabelConstraint { diff --git a/Code/Views/ATLConversationCollectionViewHeader.m b/Code/Views/ATLConversationCollectionViewHeader.m index bebfda008..beaf349ef 100644 --- a/Code/Views/ATLConversationCollectionViewHeader.m +++ b/Code/Views/ATLConversationCollectionViewHeader.m @@ -85,6 +85,7 @@ - (void)lyr_commonInit self.participantLabel.font = _participantLabelFont; self.participantLabel.textColor = _participantLabelTextColor; self.participantLabel.translatesAutoresizingMaskIntoConstraints = NO; + self.participantLabel.accessibilityLabel = ATLConversationViewHeaderIdentifier; [self addSubview:self.participantLabel]; [self configureDateLabelConstraints]; diff --git a/Code/Views/ATLConversationTableViewCell.m 
b/Code/Views/ATLConversationTableViewCell.m index a99a9ab56..21437a835 100644 --- a/Code/Views/ATLConversationTableViewCell.m +++ b/Code/Views/ATLConversationTableViewCell.m @@ -78,9 +78,6 @@ @implementation ATLConversationTableViewCell static CGFloat const ATLUnreadMessageCountLabelSize = 14.0f; static CGFloat const ATLChevronIconViewRightPadding = 14.0f; -NSString *const ATLImageMIMETypePlaceholderText = @"Attachment: Image"; -NSString *const ATLLocationMIMETypePlaceholderText = @"Attachment: Location"; - + (void)initialize { // UIAppearance Proxy Defaults @@ -199,6 +196,7 @@ - (void)setSelected:(BOOL)selected animated:(BOOL)animated - (void)prepareForReuse { [super prepareForReuse]; + [self.conversationImageView resetView]; self.conversationImageView.hidden = YES; [self setNeedsUpdateConstraints]; } @@ -258,24 +256,14 @@ - (void)setCellBackgroundColor:(UIColor *)cellBackgroundColor - (void)presentConversation:(LYRConversation *)conversation { self.dateLabel.text = [self dateLabelForLastMessage:conversation.lastMessage]; - - LYRMessage *message = conversation.lastMessage; - LYRMessagePart *messagePart = message.parts.firstObject; - if ([messagePart.MIMEType isEqualToString:ATLMIMETypeTextPlain]) { - NSString *messageText = [[NSString alloc] initWithData:messagePart.data encoding:NSUTF8StringEncoding]; - self.lastMessageLabel.attributedText = [self attributedStringForMessageText:messageText]; - } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeImageJPEG]) { - self.lastMessageLabel.text = ATLImageMIMETypePlaceholderText; - } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeImagePNG]) { - self.lastMessageLabel.text = ATLImageMIMETypePlaceholderText; - } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeLocation]) { - self.lastMessageLabel.text = ATLLocationMIMETypePlaceholderText; - } else { - self.lastMessageLabel.text = ATLImageMIMETypePlaceholderText; - } [self updateUnreadMessageIndicatorWithConversation:conversation]; } +- 
(void)updateWithLastMessageText:(NSString *)lastMessageText +{ + self.lastMessageLabel.attributedText = [self attributedStringForMessageText:lastMessageText]; +} + - (NSAttributedString *)attributedStringForMessageText:(NSString *)messageText { NSMutableParagraphStyle *paragraphStyle = [[NSMutableParagraphStyle alloc] init]; diff --git a/Code/Views/ATLIncomingMessageCollectionViewCell.m b/Code/Views/ATLIncomingMessageCollectionViewCell.m index dbf73c67d..a45cba49e 100644 --- a/Code/Views/ATLIncomingMessageCollectionViewCell.m +++ b/Code/Views/ATLIncomingMessageCollectionViewCell.m @@ -20,19 +20,17 @@ #import "ATLIncomingMessageCollectionViewCell.h" -@interface ATLIncomingMessageCollectionViewCell () +NSString *const ATLIncomingMessageCellIdentifier = @"ATLIncomingMessageCellIdentifier"; + +@interface ATLMessageCollectionViewCell () -@property (nonatomic) NSLayoutConstraint *bubbleWithAvatarLeftConstraint; -@property (nonatomic) NSLayoutConstraint *bubbleWithoutAvatarLeftConstraint; +@property (nonatomic) NSLayoutConstraint *bubbleWithAvatarLeadConstraint; +@property (nonatomic) NSLayoutConstraint *bubbleWithoutAvatarLeadConstraint; @end @implementation ATLIncomingMessageCollectionViewCell -NSString *const ATLIncomingMessageCellIdentifier = @"ATLIncomingMessageCellIdentifier"; -CGFloat const ATLAvatarImageLeftPadding = 12.0f; -CGFloat const ATLAvatarImageRightPadding = 7.0f; - + (void)initialize { ATLIncomingMessageCollectionViewCell *proxy = [self appearance]; @@ -63,38 +61,12 @@ - (void)lyr_incommingCommonInit [self configureConstraintsForIncomingMessage]; } -- (void)shouldDisplayAvatarItem:(BOOL)shouldDisplayAvatarItem -{ - NSArray *constraints = [self.contentView constraints]; - if (shouldDisplayAvatarItem) { - if ([constraints containsObject:self.bubbleWithAvatarLeftConstraint]) return; - [self.contentView removeConstraint:self.bubbleWithoutAvatarLeftConstraint]; - [self.contentView addConstraint:self.bubbleWithAvatarLeftConstraint]; - } else { - if 
([constraints containsObject:self.bubbleWithoutAvatarLeftConstraint]) return; - [self.contentView removeConstraint:self.bubbleWithAvatarLeftConstraint]; - [self.contentView addConstraint:self.bubbleWithoutAvatarLeftConstraint]; - } - [self setNeedsUpdateConstraints]; -} - -- (void)updateWithSender:(id)sender -{ - if (sender) { - self.avatarImageView.hidden = NO; - self.avatarImageView.avatarItem = sender; - } else { - self.avatarImageView.hidden = YES; - } -} - - (void)configureConstraintsForIncomingMessage { - [self.contentView addConstraint:[NSLayoutConstraint constraintWithItem:self.avatarImageView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeLeft multiplier:1.0 constant:ATLAvatarImageLeftPadding]]; - [self.contentView addConstraint:[NSLayoutConstraint constraintWithItem:self.avatarImageView attribute:NSLayoutAttributeBottom relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeBottom multiplier:1.0 constant:0]]; - self.bubbleWithAvatarLeftConstraint = [NSLayoutConstraint constraintWithItem:self.bubbleView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self.avatarImageView attribute:NSLayoutAttributeRight multiplier:1.0 constant:ATLAvatarImageRightPadding]; - [self.contentView addConstraint:self.bubbleWithAvatarLeftConstraint]; - self.bubbleWithoutAvatarLeftConstraint = [NSLayoutConstraint constraintWithItem:self.bubbleView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeLeft multiplier:1.0 constant:ATLMessageCellHorizontalMargin]; + [self.contentView addConstraint:[NSLayoutConstraint constraintWithItem:self.avatarImageView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeLeft multiplier:1.0 constant:ATLAvatarImageLeadPadding]]; + self.bubbleWithAvatarLeadConstraint = [NSLayoutConstraint 
constraintWithItem:self.bubbleView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self.avatarImageView attribute:NSLayoutAttributeRight multiplier:1.0 constant:ATLAvatarImageTailPadding]; + [self.contentView addConstraint:self.bubbleWithAvatarLeadConstraint]; + self.bubbleWithoutAvatarLeadConstraint = [NSLayoutConstraint constraintWithItem:self.bubbleView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeLeft multiplier:1.0 constant:ATLMessageCellHorizontalMargin]; } @end diff --git a/Code/Views/ATLMessageBubbleView.h b/Code/Views/ATLMessageBubbleView.h index 968e7a3a7..473951a63 100644 --- a/Code/Views/ATLMessageBubbleView.h +++ b/Code/Views/ATLMessageBubbleView.h @@ -33,6 +33,11 @@ extern CGFloat const ATLMessageBubbleDefaultHeight; */ extern NSString *const ATLUserDidTapLinkNotification; +/** + @abstract Posted when a user taps a phone number in a message bubble. + */ +extern NSString *const ATLUserDidTapPhoneNumberNotification; + /** @abstract The `ATLMessageBubbleView` class provides a lightweight, customizable view that handles displaying the actual message content within a collection view cell. @@ -51,6 +56,11 @@ extern NSString *const ATLUserDidTapLinkNotification; */ - (void)updateWithImage:(UIImage *)image width:(CGFloat)width; +/** + @abstract Tells the bubble view to display the thumbnail for a video. + */ +- (void)updateWithVideoThumbnail:(UIImage *)image width:(CGFloat)width; + /** @abstract Tells the bubble view to display a map image for a given location. 
*/ @@ -79,4 +89,20 @@ extern NSString *const ATLUserDidTapLinkNotification; */ @property (nonatomic) UIImageView *bubbleImageView; +/** + @abstract The NSTextCheckingTypes that are set as links + @discussion Currently supports NSTextCheckingTypeLink and NSTextCheckingTypePhoneNumber + @default NSTextCheckingTypeLink + */ +@property (nonatomic) NSTextCheckingType textCheckingTypes; + +/** + @abstract `UIMenuItem` objects that the application wants to display on long press, overriding the default copy functionality. + @discussion If set to nil or an empty array, this will deactivate all menu controller actions. This property can be set in the + `ATLConversationViewController` subclass by implementing the delegate `conversationViewController:configureCell:forMessage` and + overriding this property's default value. + @default `UIMenuItem` copy action. + */ +@property (nonatomic) NSArray *menuControllerActions; + @end diff --git a/Code/Views/ATLMessageBubbleView.m b/Code/Views/ATLMessageBubbleView.m index 98e7a39a0..5caf0b872 100644 --- a/Code/Views/ATLMessageBubbleView.m +++ b/Code/Views/ATLMessageBubbleView.m @@ -20,6 +20,7 @@ #import "ATLMessageBubbleView.h" #import "ATLMessagingUtilities.h" +#import "ATLPlayView.h" CGFloat const ATLMessageBubbleLabelVerticalPadding = 8.0f; CGFloat const ATLMessageBubbleLabelHorizontalPadding = 13.0f; @@ -29,10 +30,12 @@ CGFloat const ATLMessageBubbleDefaultHeight = 40.0f; NSString *const ATLUserDidTapLinkNotification = @"ATLUserDidTapLinkNotification"; +NSString *const ATLUserDidTapPhoneNumberNotification = @"ATLUserDidTapPhoneNumberNotification"; typedef NS_ENUM(NSInteger, ATLBubbleViewContentType) { ATLBubbleViewContentTypeText, ATLBubbleViewContentTypeImage, + ATLBubbleViewContentTypeVideo, ATLBubbleViewContentTypeLocation, }; @@ -40,16 +43,20 @@ @interface ATLMessageBubbleView () @property (nonatomic) ATLBubbleViewContentType contentType; @property (nonatomic) UIView *longPressMask; +@property (nonatomic) NSString 
*tappedPhoneNumber; @property (nonatomic) CLLocationCoordinate2D locationShown; @property (nonatomic) UITapGestureRecognizer *tapGestureRecognizer; +@property (nonatomic) UIPanGestureRecognizer *panGestureRecognizer; +@property (nonatomic) UILongPressGestureRecognizer *longPressGestureRecognizer; @property (nonatomic) NSURL *tappedURL; @property (nonatomic) NSLayoutConstraint *imageWidthConstraint; @property (nonatomic) MKMapSnapshotter *snapshotter; @property (nonatomic) ATLProgressView *progressView; +@property (nonatomic) ATLPlayView *playView; @end -@implementation ATLMessageBubbleView +@implementation ATLMessageBubbleView + (NSCache *)sharedCache { @@ -74,12 +81,20 @@ - (id)initWithFrame:(CGRect)frame _bubbleViewLabel.translatesAutoresizingMaskIntoConstraints = NO; [_bubbleViewLabel setContentCompressionResistancePriority:UILayoutPriorityDefaultHigh + 1 forAxis:UILayoutConstraintAxisHorizontal]; [self addSubview:_bubbleViewLabel]; - + + _textCheckingTypes = NSTextCheckingTypeLink; + _bubbleImageView = [[UIImageView alloc] init]; _bubbleImageView.translatesAutoresizingMaskIntoConstraints = NO; _bubbleImageView.contentMode = UIViewContentModeScaleAspectFill; [self addSubview:_bubbleImageView]; + _playView = [[ATLPlayView alloc]initWithFrame:CGRectMake(0,0, 128.0f, 128.0f)]; + _playView.translatesAutoresizingMaskIntoConstraints = NO; + _playView.backgroundColor = [UIColor clearColor]; + _playView.hidden = YES; + [self addSubview:_playView]; + _progressView = [[ATLProgressView alloc] initWithFrame:CGRectMake(0, 0, 128.0f, 128.0f)]; _progressView.translatesAutoresizingMaskIntoConstraints = NO; _progressView.alpha = 1.0f; @@ -88,13 +103,22 @@ - (id)initWithFrame:(CGRect)frame [self configureBubbleViewLabelConstraints]; [self configureBubbleImageViewConstraints]; [self configureProgressViewConstraints]; - + [self configurePlayViewConstraints]; + _tapGestureRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleLabelTap:)]; 
_tapGestureRecognizer.delegate = self; [self.bubbleViewLabel addGestureRecognizer:_tapGestureRecognizer]; - UILongPressGestureRecognizer *gestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleLongPress:)]; - [self addGestureRecognizer:gestureRecognizer]; + _panGestureRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)]; + _panGestureRecognizer.delegate = self; + [self addGestureRecognizer:_panGestureRecognizer]; + + _longPressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleLongPress:)]; + _longPressGestureRecognizer.delegate = self; + [self addGestureRecognizer:_longPressGestureRecognizer]; + + UIMenuItem *resetMenuItem = [[UIMenuItem alloc] initWithTitle:@"Copy" action:@selector(copyItem)]; + _menuControllerActions = @[resetMenuItem]; [self prepareForReuse]; } @@ -113,6 +137,7 @@ - (void)prepareForReuse { self.bubbleImageView.image = nil; [self applyImageWidthConstraint:NO]; + self.playView.hidden = YES; [self setBubbleViewContentType:ATLBubbleViewContentTypeText]; } @@ -131,16 +156,26 @@ - (void)updateWithImage:(UIImage *)image width:(CGFloat)width [self setBubbleViewContentType:ATLBubbleViewContentTypeImage]; } +- (void)updateWithVideoThumbnail:(UIImage *)image width:(CGFloat)width +{ + self.bubbleImageView.image = image; + self.imageWidthConstraint.constant = width; + self.playView.hidden = NO; + [self applyImageWidthConstraint:YES]; + [self setBubbleViewContentType:ATLBubbleViewContentTypeVideo]; +} + - (void)updateWithLocation:(CLLocationCoordinate2D)location { self.imageWidthConstraint.constant = ATLMaxCellWidth(); [self applyImageWidthConstraint:YES]; [self setBubbleViewContentType:ATLBubbleViewContentTypeLocation]; [self setNeedsUpdateConstraints]; - + NSString *cachedImageIdentifier = [NSString stringWithFormat:@"%f,%f", location.latitude, location.longitude]; UIImage *cachedImage = [[[self class] sharedCache] 
objectForKey:cachedImageIdentifier]; if (cachedImage) { + self.locationShown = location; self.bubbleImageView.image = cachedImage; self.bubbleImageView.contentMode = UIViewContentModeScaleAspectFill; self.bubbleImageView.hidden = NO; @@ -159,7 +194,7 @@ - (void)updateWithLocation:(CLLocationCoordinate2D)location self.bubbleImageView.image = ATLPinPhotoForSnapshot(snapshot, location); self.locationShown = location; [[[self class] sharedCache] setObject:self.bubbleImageView.image forKey:cachedImageIdentifier]; - + // Animate into view. self.bubbleImageView.alpha = 0.0; [UIView animateWithDuration:0.2 animations:^{ @@ -196,6 +231,13 @@ - (void)setBubbleViewContentType:(ATLBubbleViewContentType)contentType self.bubbleViewLabel.text = nil; break; + case ATLBubbleViewContentTypeVideo: + self.bubbleViewLabel.hidden = YES; + self.bubbleImageView.hidden = NO; + self.locationShown = kCLLocationCoordinate2DInvalid; + self.bubbleViewLabel.text = nil; + break; + case ATLBubbleViewContentTypeLocation: self.locationShown = kCLLocationCoordinate2DInvalid; self.bubbleImageView.hidden = YES; @@ -224,6 +266,16 @@ - (void)applyImageWidthConstraint:(BOOL)applyImageWidthConstraint } } +- (void)setMenuControllerActions:(NSArray *)menuControllerActions +{ + for (id object in menuControllerActions) { + if (![object isKindOfClass:[UIMenuItem class]]) { + [NSException raise:NSInternalInconsistencyException format:@"Menu controller actions must be of type UIMenuItem"]; + } + } + _menuControllerActions = menuControllerActions; +} + #pragma mark - Copy / Paste Support - (void)copyItem @@ -232,14 +284,20 @@ - (void)copyItem if (!self.bubbleViewLabel.isHidden) { pasteboard.string = self.bubbleViewLabel.text; } else { - pasteboard.image = self.bubbleImageView.image; + NSData *imageData = UIImagePNGRepresentation(self.bubbleImageView.image); + [pasteboard setData:imageData forPasteboardType:ATLPasteboardImageKey]; } } - (void)menuControllerDisappeared { - [self.longPressMask removeFromSuperview]; - 
self.longPressMask = nil; + [UIView animateWithDuration:0.1 animations:^{ + self.longPressMask.alpha = 0; + } completion:^(BOOL finished) { + [self.longPressMask removeFromSuperview]; + self.longPressMask = nil; + }]; + [[UIMenuController sharedMenuController] setMenuItems:nil]; [[NSNotificationCenter defaultCenter] removeObserver:self]; } @@ -252,7 +310,9 @@ - (BOOL)canBecomeFirstResponder - (void)handleLongPress:(UILongPressGestureRecognizer *)recognizer { - if ([recognizer state] == UIGestureRecognizerStateBegan) { + if ([recognizer state] == UIGestureRecognizerStateBegan && !self.longPressMask) { + + if (!self.menuControllerActions || self.menuControllerActions.count == 0) return; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(menuControllerDisappeared) @@ -264,17 +324,60 @@ - (void)handleLongPress:(UILongPressGestureRecognizer *)recognizer self.longPressMask = [[UIView alloc] initWithFrame:CGRectMake(0, 0, self.frame.size.width, self.frame.size.height)]; self.longPressMask.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight; self.longPressMask.backgroundColor = [UIColor blackColor]; - self.longPressMask.alpha = 0.1; + self.longPressMask.alpha = 0; + [UIView animateWithDuration:0.1 animations:^{ + self.longPressMask.alpha = 0.1; + }]; [self addSubview:self.longPressMask]; UIMenuController *menuController = [UIMenuController sharedMenuController]; - UIMenuItem *resetMenuItem = [[UIMenuItem alloc] initWithTitle:@"Copy" action:@selector(copyItem)]; - [menuController setMenuItems:@[resetMenuItem]]; - [menuController setTargetRect:CGRectMake(self.frame.size.width / 2, 0.0f, 0.0f, 0.0f) inView:self]; + [menuController setMenuItems:self.menuControllerActions]; + + // If we're in a scroll view, we might need to position the UIMenuController differently + UIView *superview = self.superview; + while (superview && ![superview isKindOfClass:[UIScrollView class]]) { + superview = superview.superview; + } + if 
([superview isKindOfClass:[UIScrollView class]]) { + UIScrollView *containingScrollView = (UIScrollView *)superview; + CGPoint contentOffset = containingScrollView.contentOffset; + CGRect frame = containingScrollView.frame; + CGRect messageRect = [self convertRect:self.frame toView:superview]; + + // Top of the message bubble is not appropriate + CGFloat standardMargin = 8.0f; + CGFloat topVisibleY = contentOffset.y + containingScrollView.contentInset.top; + if (messageRect.origin.y <= topVisibleY + standardMargin) { + // Bottom of the message bubble is not appropriate either + CGFloat bottomVisibleY = contentOffset.y + frame.size.height - containingScrollView.contentInset.bottom; + if (messageRect.origin.y + messageRect.size.height >= bottomVisibleY - standardMargin) { + // Get midpoint of the visible portion of the message bubble + CGFloat middleVisibleY = topVisibleY + (frame.size.height - containingScrollView.contentInset.bottom) / 2 - messageRect.origin.y; + [menuController setTargetRect:CGRectMake(self.frame.size.width / 2, middleVisibleY, 0.0f, 0.0f) inView:self]; + menuController.arrowDirection = UIMenuControllerArrowDefault; + } else { + [menuController setTargetRect:CGRectMake(self.frame.size.width / 2, self.frame.size.height, 0.0f, 0.0f) inView:self]; + menuController.arrowDirection = UIMenuControllerArrowUp; + } + } else { + [menuController setTargetRect:CGRectMake(self.frame.size.width / 2, 0.0f, 0.0f, 0.0f) inView:self]; + menuController.arrowDirection = UIMenuControllerArrowDefault; + } + } else { + [menuController setTargetRect:CGRectMake(self.frame.size.width / 2, 0.0f, 0.0f, 0.0f) inView:self]; + menuController.arrowDirection = UIMenuControllerArrowDefault; + } + self.panGestureRecognizer.enabled = NO; [menuController setMenuVisible:YES animated:YES]; + } else if ([recognizer state] == UIGestureRecognizerStateEnded) { + self.panGestureRecognizer.enabled = YES; } } +- (void)handlePan:(UIPanGestureRecognizer *)recognizer { + [[UIMenuController 
sharedMenuController] setMenuVisible:NO animated:YES]; +} + #pragma mark - UIGestureRecognizerDelegate - (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer @@ -300,9 +403,16 @@ - (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer NSUInteger characterIndex = [layoutManager characterIndexForPoint:tapLocation inTextContainer:textContainer fractionOfDistanceBetweenInsertionPoints:NULL]; - NSArray *results = ATLLinkResultsForText(self.bubbleViewLabel.attributedText.string); + NSArray *results = ATLTextCheckingResultsForText(self.bubbleViewLabel.attributedText.string, self.textCheckingTypes); for (NSTextCheckingResult *result in results) { if (NSLocationInRange(characterIndex, result.range)) { + if (result.resultType == NSTextCheckingTypeLink && self.textCheckingTypes & NSTextCheckingTypeLink) { + self.tappedURL = result.URL; + return YES; + } else if (result.resultType == NSTextCheckingTypePhoneNumber && self.textCheckingTypes & NSTextCheckingTypePhoneNumber) { + self.tappedPhoneNumber = result.phoneNumber; + return YES; + } self.tappedURL = result.URL; return YES; } @@ -310,12 +420,30 @@ - (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer return NO; } +- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer +shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer { + if (gestureRecognizer == self.panGestureRecognizer || otherGestureRecognizer == self.panGestureRecognizer) { + return YES; + } + if ((gestureRecognizer == self.longPressGestureRecognizer || otherGestureRecognizer == self.longPressGestureRecognizer) && (!self.menuControllerActions || self.menuControllerActions.count == 0)) { + return YES; + } + return NO; +} + #pragma mark - Actions - (void)handleLabelTap:(UITapGestureRecognizer *)tapGestureRecognizer { - [[NSNotificationCenter defaultCenter] postNotificationName:ATLUserDidTapLinkNotification object:self.tappedURL]; - 
self.tappedURL = nil; + if (self.tappedURL) { + [[NSNotificationCenter defaultCenter] postNotificationName:ATLUserDidTapLinkNotification object:self.tappedURL]; + self.tappedURL = nil; + } + + if (self.tappedPhoneNumber) { + [[NSNotificationCenter defaultCenter] postNotificationName:ATLUserDidTapPhoneNumberNotification object:self.tappedPhoneNumber]; + self.tappedURL = nil; + } } - (void)dealloc @@ -330,7 +458,9 @@ - (void)configureBubbleViewLabelConstraints [self addConstraint:[NSLayoutConstraint constraintWithItem:_bubbleViewLabel attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeTop multiplier:1.0 constant:ATLMessageBubbleLabelVerticalPadding]]; [self addConstraint:[NSLayoutConstraint constraintWithItem:_bubbleViewLabel attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeLeft multiplier:1.0 constant:ATLMessageBubbleLabelHorizontalPadding]]; [self addConstraint:[NSLayoutConstraint constraintWithItem:_bubbleViewLabel attribute:NSLayoutAttributeRight relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeRight multiplier:1.0 constant:-ATLMessageBubbleLabelHorizontalPadding]]; - [self addConstraint:[NSLayoutConstraint constraintWithItem:_bubbleViewLabel attribute:NSLayoutAttributeBottom relatedBy:NSLayoutRelationGreaterThanOrEqual toItem:self attribute:NSLayoutAttributeBottom multiplier:1.0 constant:-ATLMessageBubbleLabelVerticalPadding]]; + NSLayoutConstraint *bottomConstraint = [NSLayoutConstraint constraintWithItem:_bubbleViewLabel attribute:NSLayoutAttributeBottom relatedBy:NSLayoutRelationLessThanOrEqual toItem:self attribute:NSLayoutAttributeBottom multiplier:1.0 constant:-ATLMessageBubbleLabelVerticalPadding]; + bottomConstraint.priority = 800; + [self addConstraint:bottomConstraint]; } - (void)configureBubbleImageViewConstraints @@ -340,7 +470,6 @@ - (void)configureBubbleImageViewConstraints [self addConstraint:[NSLayoutConstraint 
constraintWithItem:_bubbleImageView attribute:NSLayoutAttributeCenterX relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterX multiplier:1.0 constant:0]]; [self addConstraint:[NSLayoutConstraint constraintWithItem:_bubbleImageView attribute:NSLayoutAttributeCenterY relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterY multiplier:1.0 constant:0]]; _imageWidthConstraint = [NSLayoutConstraint constraintWithItem:_bubbleImageView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:0]; - } - (void)configureProgressViewConstraints @@ -351,4 +480,12 @@ - (void)configureProgressViewConstraints [self addConstraint:[NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:64.0f]]; } +- (void)configurePlayViewConstraints +{ + [self addConstraint:[NSLayoutConstraint constraintWithItem:_playView attribute:NSLayoutAttributeCenterX relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterX multiplier:1.0 constant:0]]; + [self addConstraint:[NSLayoutConstraint constraintWithItem:_playView attribute:NSLayoutAttributeCenterY relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterY multiplier:1.0 constant:0]]; + [self addConstraint:[NSLayoutConstraint constraintWithItem:_playView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:64.0f]]; + [self addConstraint:[NSLayoutConstraint constraintWithItem:_playView attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:64.0f]]; +} + @end diff --git a/Code/Views/ATLMessageCollectionViewCell.h b/Code/Views/ATLMessageCollectionViewCell.h index 
249a476f6..355127d91 100644 --- a/Code/Views/ATLMessageCollectionViewCell.h +++ b/Code/Views/ATLMessageCollectionViewCell.h @@ -25,6 +25,12 @@ #import "ATLAvatarImageView.h" extern CGFloat const ATLMessageCellHorizontalMargin; +extern NSString *const ATLGIFAccessibilityLabel; +extern NSString *const ATLImageAccessibilityLabel; +extern NSString *const ATLVideoAccessibilityLabel; + +extern CGFloat const ATLAvatarImageLeadPadding; +extern CGFloat const ATLAvatarImageTailPadding; /** @abstract The `ATLMessageCollectionViewCell` class provides a lightweight, customizable collection @@ -53,6 +59,13 @@ extern CGFloat const ATLMessageCellHorizontalMargin; */ @property (nonatomic) UIColor *bubbleViewColor UI_APPEARANCE_SELECTOR; +/** + @abstract The NSTextCheckingTypes that will be passed to the bubbleView + @discussion Currently supports NSTextCheckingTypeLink and NSTextCheckingTypePhoneNumber + @default NSTextCheckingTypeLink +*/ +@property (nonatomic) NSTextCheckingType messageTextCheckingTypes UI_APPEARANCE_SELECTOR; + /** @abstract The corner radius of the bubble view displayed in the cell. Default is 16. */ @@ -68,6 +81,11 @@ extern CGFloat const ATLMessageCellHorizontalMargin; */ @property (nonatomic) ATLAvatarImageView *avatarImageView; +/** + @abstract The `LYRMessage` object passed in `ATLMessagePresenting` protocol `presentMessage:`. + */ +@property (nonatomic) LYRMessage *message; + /** @abstract Performs calculations to determine a cell's height. @param message The `LYRMessage` object that will be displayed in the cell. 
diff --git a/Code/Views/ATLMessageCollectionViewCell.m b/Code/Views/ATLMessageCollectionViewCell.m index 709974f49..edacaf7c4 100644 --- a/Code/Views/ATLMessageCollectionViewCell.m +++ b/Code/Views/ATLMessageCollectionViewCell.m @@ -20,34 +20,57 @@ #import "ATLMessageCollectionViewCell.h" #import "ATLMessagingUtilities.h" +#import "ATLUIImageHelper.h" #import "ATLIncomingMessageCollectionViewCell.h" #import "ATLOutgoingMessageCollectionViewCell.h" -#import +#import + +NSString *const ATLGIFAccessibilityLabel = @"Message: GIF"; +NSString *const ATLImageAccessibilityLabel = @"Message: Image"; +NSString *const ATLVideoAccessibilityLabel = @"Message: Video"; +static char const ATLMessageCollectionViewCellImageProcessingConcurrentQueue[] = "com.layer.Atlas.ATLMessageCollectionViewCell.imageProcessingConcurrentQueue"; + +CGFloat const ATLMessageCellMinimumHeight = 10.0f; +CGFloat const ATLMessageCellHorizontalMargin = 16.0f; +CGFloat const ATLAvatarImageLeadPadding = 12.0f; +CGFloat const ATLAvatarImageTailPadding = 7.0f; +NSInteger const kATLSharedCellTag = 1000; @interface ATLMessageCollectionViewCell () @property (nonatomic) BOOL messageSentState; -@property (nonatomic) LYRMessage *message; @property (nonatomic) LYRProgress *progress; @property (nonatomic) NSUInteger lastProgressFractionCompleted; +@property (nonatomic) NSLayoutConstraint *bubbleWithAvatarLeadConstraint; +@property (nonatomic) NSLayoutConstraint *bubbleWithoutAvatarLeadConstraint; +@property (nonatomic) dispatch_queue_t imageProcessingConcurrentQueue; @end @implementation ATLMessageCollectionViewCell -CGFloat const ATLMessageCellMinimumHeight = 10.0f; -CGFloat const ATLMessageCellHorizontalMargin = 16.0f; - + (ATLMessageCollectionViewCell *)sharedCell { static ATLMessageCollectionViewCell *_sharedCell; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ - _sharedCell = [ATLMessageCollectionViewCell new]; + _sharedCell = [[self class] new]; + _sharedCell.tag = kATLSharedCellTag; + 
_sharedCell.hidden = YES; }); return _sharedCell; } ++ (NSCache *)sharedHeightCache +{ + static NSCache *sharedHeightCache; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + sharedHeightCache = [NSCache new]; + }); + return sharedHeightCache; +} + - (id)initWithFrame:(CGRect)frame { self = [super initWithFrame:frame]; @@ -68,10 +91,13 @@ - (id)initWithCoder:(NSCoder *)aDecoder - (void)lyr_commonInit { + _imageProcessingConcurrentQueue = dispatch_queue_create(ATLMessageCollectionViewCellImageProcessingConcurrentQueue, DISPATCH_QUEUE_CONCURRENT); + // Default UIAppearance _messageTextFont = [UIFont systemFontOfSize:17]; _messageTextColor = [UIColor blackColor]; _messageLinkTextColor = [UIColor whiteColor]; + _messageTextCheckingTypes = NSTextCheckingTypeLink; _bubbleViewColor = ATLBlueColor(); _bubbleViewCornerRadius = 17.0f; @@ -96,10 +122,11 @@ - (void)prepareForReuse // Remove self from any previously assigned LYRProgress instance. self.progress.delegate = nil; self.lastProgressFractionCompleted = 0; + [self.avatarImageView resetView]; [self.bubbleView prepareForReuse]; } -- (void)presentMessage:(LYRMessage *)message; +- (void)presentMessage:(LYRMessage *)message { self.message = message; LYRMessagePart *messagePart = message.parts.firstObject; @@ -110,8 +137,12 @@ - (void)presentMessage:(LYRMessage *)message; [self configureBubbleViewForImageContent]; }else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeImagePNG]) { [self configureBubbleViewForImageContent]; + } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeImageGIF]){ + [self configureBubbleViewForGIFContent]; } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeLocation]) { [self configureBubbleViewForLocationContent]; + } else if ([messagePart.MIMEType isEqualToString:ATLMIMETypeVideoMP4]) { + [self configureBubbleViewForVideoContent]; } } @@ -126,12 +157,13 @@ - (void)configureBubbleViewForTextContent - (void)configureBubbleViewForImageContent { - 
self.accessibilityLabel = [NSString stringWithFormat:@"Message: Photo"]; + self.accessibilityLabel = ATLImageAccessibilityLabel; LYRMessagePart *fullResImagePart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeImageJPEG); if (!fullResImagePart) { fullResImagePart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeImagePNG); } + if (fullResImagePart && ((fullResImagePart.transferStatus == LYRContentTransferAwaitingUpload) || (fullResImagePart.transferStatus == LYRContentTransferUploading))) { // Set self for delegation, if full resolution message part @@ -144,12 +176,81 @@ - (void)configureBubbleViewForImageContent [self.bubbleView updateProgressIndicatorWithProgress:1.0 visible:NO animated:YES]; } - UIImage *displayingImage; + __block UIImage *displayingImage; LYRMessagePart *previewImagePart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeImageJPEGPreview); + if (!previewImagePart) { // If no preview image part found, resort to the full-resolution image. previewImagePart = fullResImagePart; } + + __weak typeof(self) weakSelf = self; + __block LYRMessage *previousMessage = weakSelf.message; + + dispatch_async(self.imageProcessingConcurrentQueue, ^{ + if (previewImagePart.fileURL) { + displayingImage = [UIImage imageWithContentsOfFile:previewImagePart.fileURL.path]; + } else { + displayingImage = [UIImage imageWithData:previewImagePart.data]; + } + + CGSize size = CGSizeZero; + LYRMessagePart *sizePart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeImageSize); + if (sizePart) { + size = ATLImageSizeForJSONData(sizePart.data); + size = ATLConstrainImageSizeToCellSize(size); + } + if (CGSizeEqualToSize(size, CGSizeZero)) { + // Resort to image's size, if no dimensions metadata message parts found. + size = ATLImageSizeForData(fullResImagePart.data); + } + dispatch_async(dispatch_get_main_queue(), ^{ + // Fall-back to programmatically requesting for a content download of + // single message part messages (Android compatibility). 
+ if ([[weakSelf.message valueForKeyPath:@"parts.MIMEType"] isEqual:@[ATLMIMETypeImageJPEG]]) { + if (fullResImagePart && (fullResImagePart.transferStatus == LYRContentTransferReadyForDownload)) { + NSError *error; + LYRProgress *progress = [fullResImagePart downloadContent:&error]; + if (!progress) { + NSLog(@"failed to request for a content download from the UI with error=%@", error); + } + [weakSelf.bubbleView updateProgressIndicatorWithProgress:0.0 visible:NO animated:NO]; + } else if (fullResImagePart && (fullResImagePart.transferStatus == LYRContentTransferDownloading)) { + // Set self for delegation, if single image message part message + // hasn't been downloaded yet, or is still downloading. + LYRProgress *progress = fullResImagePart.progress; + [progress setDelegate:weakSelf]; + weakSelf.progress = progress; + [weakSelf.bubbleView updateProgressIndicatorWithProgress:progress.fractionCompleted visible:YES animated:NO]; + } else { + [weakSelf.bubbleView updateProgressIndicatorWithProgress:1.0 visible:NO animated:YES]; + } + } + if (weakSelf.message != previousMessage) { + return; + } + [weakSelf.bubbleView updateWithImage:displayingImage width:size.width]; + }); + }); +} + +- (void)configureBubbleViewForVideoContent +{ + self.accessibilityLabel = ATLVideoAccessibilityLabel; + + LYRMessagePart *fullResVideoPart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeVideoMP4); + if (fullResVideoPart && ((fullResVideoPart.transferStatus == LYRContentTransferAwaitingUpload) || + (fullResVideoPart.transferStatus == LYRContentTransferUploading))) { + // Set self for delegation, if full resolution message part + // hasn't been uploaded yet, or is still uploading. 
+ LYRProgress *progress = fullResVideoPart.progress; + [progress setDelegate:self]; + self.progress = progress; + [self.bubbleView updateProgressIndicatorWithProgress:progress.fractionCompleted visible:YES animated:NO]; + } + LYRMessagePart *previewImagePart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeImageJPEGPreview); + + UIImage *displayingImage; if (previewImagePart.fileURL) { displayingImage = [UIImage imageWithContentsOfFile:previewImagePart.fileURL.path]; } else { @@ -162,11 +263,88 @@ - (void)configureBubbleViewForImageContent size = ATLImageSizeForJSONData(sizePart.data); size = ATLConstrainImageSizeToCellSize(size); } - if (CGSizeEqualToSize(size, CGSizeZero)) { - // Resort to image's size, if no dimensions metadata message parts found. - size = ATLImageSizeForData(fullResImagePart.data); + [self.bubbleView updateWithVideoThumbnail:displayingImage width:size.width]; +} + +- (void)configureBubbleViewForGIFContent +{ + self.accessibilityLabel = ATLGIFAccessibilityLabel; + + LYRMessagePart *fullResImagePart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeImageGIF); + + if (fullResImagePart && ((fullResImagePart.transferStatus == LYRContentTransferAwaitingUpload) || + (fullResImagePart.transferStatus == LYRContentTransferUploading))) { + // Set self for delegation, if full resolution message part + // hasn't been uploaded yet, or is still uploading. + LYRProgress *progress = fullResImagePart.progress; + [progress setDelegate:self]; + self.progress = progress; + [self.bubbleView updateProgressIndicatorWithProgress:progress.fractionCompleted visible:YES animated:NO]; + } else { + [self.bubbleView updateProgressIndicatorWithProgress:1.0 visible:NO animated:YES]; + } + + __block UIImage *displayingImage; + LYRMessagePart *previewImagePart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeImageGIFPreview); + + if (!previewImagePart) { + // If no preview image part found, resort to the full-resolution image. 
+ previewImagePart = fullResImagePart; } - [self.bubbleView updateWithImage:displayingImage width:size.width]; + __weak typeof(self) weakSelf = self; + __block LYRMessage *previousMessage = weakSelf.message; + + dispatch_async(self.imageProcessingConcurrentQueue, ^{ + if (previewImagePart.fileURL) { + displayingImage = ATLAnimatedImageWithAnimatedGIFURL(previewImagePart.fileURL); + } else if (previewImagePart.data) { + displayingImage = ATLAnimatedImageWithAnimatedGIFData(previewImagePart.data); + } + + CGSize size = CGSizeZero; + LYRMessagePart *sizePart = ATLMessagePartForMIMEType(self.message, ATLMIMETypeImageSize); + if (sizePart) { + size = ATLImageSizeForJSONData(sizePart.data); + size = ATLConstrainImageSizeToCellSize(size); + } + if (CGSizeEqualToSize(size, CGSizeZero)) { + // Resort to image's size, if no dimensions metadata message parts found. + size = ATLImageSizeForData(fullResImagePart.data); + } + + dispatch_async(dispatch_get_main_queue(), ^{ + // For GIFs we only download full resolution parts when rendered in the UI + // Low res GIFs are autodownloaded but blurry + if ([fullResImagePart.MIMEType isEqualToString:ATLMIMETypeImageGIF]) { + if (fullResImagePart.transferStatus == LYRContentTransferReadyForDownload) { + NSError *error; + LYRProgress *progress = [fullResImagePart downloadContent:&error]; + if (!progress) { + NSLog(@"failed to request for a content download from the UI with error=%@", error); + } + [weakSelf.bubbleView updateProgressIndicatorWithProgress:0.0 visible:NO animated:NO]; + [weakSelf.bubbleView updateWithImage:displayingImage width:size.width]; + } else if (fullResImagePart.transferStatus == LYRContentTransferDownloading) { + LYRProgress *progress = fullResImagePart.progress; + [progress setDelegate:weakSelf]; + weakSelf.progress = progress; + [weakSelf.bubbleView updateProgressIndicatorWithProgress:progress.fractionCompleted visible:YES animated:NO]; + [weakSelf.bubbleView updateWithImage:displayingImage width:size.width]; + } 
else { + dispatch_async(weakSelf.imageProcessingConcurrentQueue, ^{ + displayingImage = ATLAnimatedImageWithAnimatedGIFData(fullResImagePart.data); + dispatch_async(dispatch_get_main_queue(), ^{ + if (weakSelf.message != previousMessage) { + return; + } + [weakSelf.bubbleView updateProgressIndicatorWithProgress:1.0 visible:NO animated:YES]; + [weakSelf.bubbleView updateWithImage:displayingImage width:size.width]; + }); + }); + } + } + }); + }); } - (void)configureBubbleViewForLocationContent @@ -199,6 +377,12 @@ - (void)setMessageLinkTextColor:(UIColor *)messageLinkTextColor if ([self messageContainsTextContent]) [self configureBubbleViewForTextContent]; } +- (void)setMessageTextCheckingTypes:(NSTextCheckingType)messageLinkTypes +{ + _messageTextCheckingTypes = messageLinkTypes; + self.bubbleView.textCheckingTypes = messageLinkTypes; +} + - (void)setBubbleViewColor:(UIColor *)bubbleViewColor { _bubbleViewColor = bubbleViewColor; @@ -237,7 +421,7 @@ - (NSAttributedString *)attributedStringForText:(NSString *)text { NSDictionary *attributes = @{NSFontAttributeName : self.messageTextFont, NSForegroundColorAttributeName : self.messageTextColor}; NSMutableAttributedString *attributedString = [[NSMutableAttributedString alloc] initWithString:text attributes:attributes]; - NSArray *linkResults = ATLLinkResultsForText(text); + NSArray *linkResults = ATLTextCheckingResultsForText(text, self.messageTextCheckingTypes); for (NSTextCheckingResult *result in linkResults) { NSDictionary *linkAttributes = @{NSForegroundColorAttributeName : self.messageLinkTextColor, NSUnderlineStyleAttributeName : @(NSUnderlineStyleSingle)}; @@ -258,28 +442,44 @@ - (void)configureLayoutConstraints [self.contentView addConstraint:[NSLayoutConstraint constraintWithItem:self.bubbleView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationLessThanOrEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:maxBubbleWidth]]; [self.contentView 
addConstraint:[NSLayoutConstraint constraintWithItem:self.bubbleView attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeHeight multiplier:1.0 constant:0]]; [self.contentView addConstraint:[NSLayoutConstraint constraintWithItem:self.bubbleView attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeTop multiplier:1.0 constant:0]]; + [self.contentView addConstraint:[NSLayoutConstraint constraintWithItem:self.avatarImageView attribute:NSLayoutAttributeBottom relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeBottom multiplier:1.0 constant:0]]; } - (void)updateWithSender:(id)sender { - // Implemented by subclass + if (sender) { + self.avatarImageView.hidden = NO; + self.avatarImageView.avatarItem = sender; + } else { + self.avatarImageView.hidden = YES; + } } - (void)shouldDisplayAvatarItem:(BOOL)shouldDisplayAvatarItem { - // Implemented by subclass + NSArray *constraints = [self.contentView constraints]; + if (shouldDisplayAvatarItem) { + if ([constraints containsObject:self.bubbleWithAvatarLeadConstraint]) return; + [self.contentView removeConstraint:self.bubbleWithoutAvatarLeadConstraint]; + [self.contentView addConstraint:self.bubbleWithAvatarLeadConstraint]; + } else { + if ([constraints containsObject:self.bubbleWithoutAvatarLeadConstraint]) return; + [self.contentView removeConstraint:self.bubbleWithAvatarLeadConstraint]; + [self.contentView addConstraint:self.bubbleWithoutAvatarLeadConstraint]; + } + [self setNeedsUpdateConstraints]; } -#pragma mark - Cell Height Calculations +#pragma mark - Cell Height Calculations + (CGFloat)cellHeightForMessage:(LYRMessage *)message inView:(UIView *)view { LYRMessagePart *part = message.parts.firstObject; - + CGFloat height = 0; if ([part.MIMEType isEqualToString:ATLMIMETypeTextPlain]) { height = [self cellHeightForTextMessage:message inView:view]; - } else if 
([part.MIMEType isEqualToString:ATLMIMETypeImageJPEG] || [part.MIMEType isEqualToString:ATLMIMETypeImagePNG]) { + } else if ([part.MIMEType isEqualToString:ATLMIMETypeImageJPEG] || [part.MIMEType isEqualToString:ATLMIMETypeImagePNG] || [part.MIMEType isEqualToString:ATLMIMETypeImageGIF]|| [part.MIMEType isEqualToString:ATLMIMETypeVideoMP4]) { height = [self cellHeightForImageMessage:message]; } else if ([part.MIMEType isEqualToString:ATLMIMETypeLocation]) { height = ATLMessageBubbleMapHeight; @@ -291,16 +491,27 @@ + (CGFloat)cellHeightForMessage:(LYRMessage *)message inView:(UIView *)view + (CGFloat)cellHeightForTextMessage:(LYRMessage *)message inView:(id)view { - // Temporarily adding the view to the hierarchy so that UIAppearance property values will be set based on containment. + if ([[self sharedHeightCache] objectForKey:message.identifier]) { + return [[[self sharedHeightCache] objectForKey:message.identifier] floatValue]; + } + // Adding the view to the hierarchy so that UIAppearance property values will be set based on containment. 
ATLMessageCollectionViewCell *cell = [self sharedCell]; - [view addSubview:cell]; - [cell removeFromSuperview]; + if (![view viewWithTag:kATLSharedCellTag]) { + [view addSubview:cell]; + } LYRMessagePart *part = message.parts.firstObject; NSString *text = [[NSString alloc] initWithData:part.data encoding:NSUTF8StringEncoding]; - UIFont *font = cell.messageTextFont; + UIFont *font = [[[self class] appearance] messageTextFont]; + if (!font) { + font = cell.messageTextFont; + } CGSize size = ATLTextPlainSize(text, font); - return size.height + ATLMessageBubbleLabelVerticalPadding * 2; + CGFloat height = size.height + ATLMessageBubbleLabelVerticalPadding * 2; + if (![[self sharedHeightCache] objectForKey:message.identifier]) { + [[self sharedHeightCache] setObject:@(height) forKey:message.identifier]; + } + return height; } + (CGFloat)cellHeightForImageMessage:(LYRMessage *)message @@ -318,7 +529,15 @@ + (CGFloat)cellHeightForImageMessage:(LYRMessage *)message imagePart = ATLMessagePartForMIMEType(message, ATLMIMETypeImageJPEG); } // Resort to image's size, if no dimensions metadata message parts found. - size = ATLImageSizeForData(imagePart.data); + if ((imagePart.transferStatus == LYRContentTransferComplete) || + (imagePart.transferStatus == LYRContentTransferAwaitingUpload) || + (imagePart.transferStatus == LYRContentTransferUploading)) { + size = ATLImageSizeForData(imagePart.data); + } else { + // We don't have the image data yet, making cell think there's + // an image with 3:4 aspect ration (portrait photo). 
+ size = ATLConstrainImageSizeToCellSize(CGSizeMake(3000, 4000)); + } } return size.height; } diff --git a/Code/Views/ATLMessageComposeTextView.m b/Code/Views/ATLMessageComposeTextView.m index 3e5f89348..9f7a0cc98 100644 --- a/Code/Views/ATLMessageComposeTextView.m +++ b/Code/Views/ATLMessageComposeTextView.m @@ -46,7 +46,7 @@ - (id)init self.placeholderLabel = [UILabel new]; self.placeholderLabel.font = self.font; - self.placeholderLabel.text = self.placeholder; + self.placeholderLabel.text = ATLLocalizedString(self.placeholder, self.placeholder, nil); self.placeholderLabel.textColor = [UIColor lightGrayColor]; self.placeholderLabel.lineBreakMode = NSLineBreakByTruncatingTail; [self addSubview:self.placeholderLabel]; @@ -105,7 +105,7 @@ - (void)setAttributedText:(NSAttributedString *)attributedText - (void)setPlaceholder:(NSString *)placeholder { _placeholder = placeholder; - self.placeholderLabel.text = placeholder; + self.placeholderLabel.text = ATLLocalizedString(placeholder, placeholder, nil); [self setNeedsLayout]; } diff --git a/Code/Views/ATLMessageInputToolbar.h b/Code/Views/ATLMessageInputToolbar.h index f54a0e6f0..af16f47ea 100644 --- a/Code/Views/ATLMessageInputToolbar.h +++ b/Code/Views/ATLMessageInputToolbar.h @@ -78,11 +78,12 @@ extern NSString *const ATLMessageInputToolbarAccessibilityLabel; /** @abstract Inserts the mediaAttachment as an attributed text attachment which is inlined with text. @param mediaAttachment The `ATLMediaAttachment` instance containing information about the media. + @param endLineBreak A `BOOL` which if `YES` inserts a new line after the media attachment insertion. @discussion The view will automatically resize the attachment's thumbnail and itself to comfortably fit the thumbnail content. The image will also be cached and is accessible via the mediaAttachments property. 
*/ -- (void)insertMediaAttachment:(ATLMediaAttachment *)mediaAttachment; +- (void)insertMediaAttachment:(ATLMediaAttachment *)mediaAttachment withEndLineBreak:(BOOL)endLineBreak; //----------------------------- // UI Customization @@ -90,7 +91,7 @@ extern NSString *const ATLMessageInputToolbarAccessibilityLabel; /** @abstract The left accessory button for the view. - @discussion By default, the button displays a camera icon. + @discussion By default, the button displays a camera icon. If set to `nil` the `textInputView` will expand to the left edge of the toolbar. */ @property (nonatomic) UIButton *leftAccessoryButton; @@ -100,6 +101,40 @@ extern NSString *const ATLMessageInputToolbarAccessibilityLabel; */ @property (nonatomic) UIButton *rightAccessoryButton; +/** + @abstract The font color for the right accessory button in active state. + */ +@property (nonatomic) UIColor *rightAccessoryButtonActiveColor UI_APPEARANCE_SELECTOR; + +/** + @abstract The font color for the right accessory button in disabled state. + */ +@property (nonatomic) UIColor *rightAccessoryButtonDisabledColor UI_APPEARANCE_SELECTOR; + +/** + @abstract The font for the right accessory button. + */ +@property (nonatomic) UIFont *rightAccessoryButtonFont UI_APPEARANCE_SELECTOR; + +/** + @abstract The image displayed on left accessory button. + @default A `camera` icon. + */ +@property (nonatomic) UIImage *leftAccessoryImage; + +/** + @abstract The image displayed on right accessory button. + @default A `location` icon. + */ +@property (nonatomic) UIImage *rightAccessoryImage; + +/** + @abstract Determines whether or not the right accessory button displays an icon. + @discussion If NO, the right accessory button will display the text `SEND` at all times. + @default YES + */ +@property(nonatomic) BOOL displaysRightAccessoryImage; + /** + @abstract An automatically resizing message composition field. 
*/ @@ -124,4 +159,15 @@ extern NSString *const ATLMessageInputToolbarAccessibilityLabel; */ @property (nonatomic, readonly) NSArray *mediaAttachments; +//------------------- +// Layout Accessories +//------------------- + +/** + @abstract The view controller whose input accessory view is the `ATLMessageInputToolbar`. + @discussion This property is set internally in the `ATLBaseConversationViewController` to change the view's frame + to support UISplitViewController usage. This property should only be set when subclassing `ATLMessageInputToolbar`. + */ +@property (nonatomic, weak) UIViewController *containerViewController; + @end diff --git a/Code/Views/ATLMessageInputToolbar.m b/Code/Views/ATLMessageInputToolbar.m index c25ffcce8..fc560a4e7 100644 --- a/Code/Views/ATLMessageInputToolbar.m +++ b/Code/Views/ATLMessageInputToolbar.m @@ -31,6 +31,7 @@ @interface ATLMessageInputToolbar () @property (nonatomic) UITextView *dummyTextView; @property (nonatomic) CGFloat textViewMaxHeight; @property (nonatomic) CGFloat buttonCenterY; +@property (nonatomic) BOOL firstAppearance; @end @@ -52,6 +53,14 @@ @implementation ATLMessageInputToolbar static CGFloat const ATLRightAccessoryButtonWidth = 46.0f; static CGFloat const ATLButtonHeight = 28.0f; ++ (void)initialize +{ + ATLMessageInputToolbar *proxy = [self appearance]; + proxy.rightAccessoryButtonActiveColor = ATLBlueColor(); + proxy.rightAccessoryButtonDisabledColor = [UIColor grayColor]; + proxy.rightAccessoryButtonFont = [UIFont boldSystemFontOfSize:17]; +} + - (id)init { self = [super init]; @@ -59,11 +68,16 @@ - (id)init self.accessibilityLabel = ATLMessageInputToolbarAccessibilityLabel; self.translatesAutoresizingMaskIntoConstraints = NO; self.autoresizingMask = UIViewAutoresizingFlexibleWidth; - + + self.leftAccessoryImage = [UIImage imageNamed:@"AtlasResource.bundle/camera_dark"]; + self.rightAccessoryImage = [UIImage imageNamed:@"AtlasResource.bundle/location_dark"]; + self.displaysRightAccessoryImage = YES; + 
self.firstAppearance = YES; + self.leftAccessoryButton = [[UIButton alloc] init]; self.leftAccessoryButton.accessibilityLabel = ATLMessageInputToolbarCameraButton; self.leftAccessoryButton.contentMode = UIViewContentModeScaleAspectFit; - [self.leftAccessoryButton setImage:[UIImage imageNamed:@"AtlasResource.bundle/camera_dark"] forState:UIControlStateNormal]; + [self.leftAccessoryButton setImage:self.leftAccessoryImage forState:UIControlStateNormal]; [self.leftAccessoryButton addTarget:self action:@selector(leftAccessoryButtonTapped) forControlEvents:UIControlEventTouchUpInside]; [self addSubview:self.leftAccessoryButton]; @@ -79,7 +93,7 @@ - (id)init [self.rightAccessoryButton addTarget:self action:@selector(rightAccessoryButtonTapped) forControlEvents:UIControlEventTouchUpInside]; [self addSubview:self.rightAccessoryButton]; [self configureRightAccessoryButtonState]; - + // Calling sizeThatFits: or contentSize on the displayed UITextView causes the cursor's position to momentarily appear out of place and prevent scrolling to the selected range. So we use another text view for height calculations. self.dummyTextView = [[ATLMessageComposeTextView alloc] init]; self.maxNumberOfLines = 8; @@ -90,14 +104,34 @@ - (id)init - (void)layoutSubviews { [super layoutSubviews]; - + + if (self.firstAppearance) { + [self configureRightAccessoryButtonState]; + self.firstAppearance = NO; + } + + // set the font for the dummy text view as well + self.dummyTextView.font = self.textInputView.font; + // We layout the views manually since using Auto Layout seems to cause issues in this context (i.e. an auto height resizing text view in an input accessory view) especially with iOS 7.1. 
CGRect frame = self.frame; CGRect leftButtonFrame = self.leftAccessoryButton.frame; CGRect rightButtonFrame = self.rightAccessoryButton.frame; CGRect textViewFrame = self.textInputView.frame; - leftButtonFrame.size.width = ATLLeftAccessoryButtonWidth; + if (!self.leftAccessoryButton) { + leftButtonFrame.size.width = 0; + } else { + leftButtonFrame.size.width = ATLLeftAccessoryButtonWidth; + } + + // This makes the input accessory view work with UISplitViewController to manage the frame width. + if (self.containerViewController) { + CGRect windowRect = [self.containerViewController.view convertRect:self.containerViewController.view.frame toView:nil]; + frame.size.width = windowRect.size.width; + frame.origin.x = windowRect.origin.x; + } + leftButtonFrame.size.height = ATLButtonHeight; leftButtonFrame.origin.x = ATLLeftButtonHorizontalMargin; @@ -139,15 +173,13 @@ - (void)layoutSubviews - (void)paste:(id)sender { - NSArray *images = [UIPasteboard generalPasteboard].images; - if (images.count > 0) { - for (UIImage *image in images) { - ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithImage:image - metadata:nil - thumbnailSize:ATLDefaultThumbnailSize]; - [self insertMediaAttachment:mediaAttachment]; - } - return; + NSData *imageData = [[UIPasteboard generalPasteboard] dataForPasteboardType:ATLPasteboardImageKey]; + if (imageData) { + UIImage *image = [UIImage imageWithData:imageData]; + ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithImage:image + metadata:nil + thumbnailSize:ATLDefaultThumbnailSize]; + [self insertMediaAttachment:mediaAttachment withEndLineBreak:YES]; } } @@ -160,7 +192,7 @@ - (void)setMaxNumberOfLines:(NSUInteger)maxNumberOfLines [self setNeedsLayout]; } -- (void)insertMediaAttachment:(ATLMediaAttachment *)mediaAttachment +- (void)insertMediaAttachment:(ATLMediaAttachment *)mediaAttachment withEndLineBreak:(BOOL)endLineBreak; { UITextView *textView = self.textInputView; @@ -170,11 +202,15 @@ - 
(void)insertMediaAttachment:(ATLMediaAttachment *)mediaAttachment [attributedString appendAttributedString:lineBreak]; } - NSMutableAttributedString *attachmentString = [[NSAttributedString attributedStringWithAttachment:mediaAttachment] mutableCopy]; - [attachmentString addAttribute:NSFontAttributeName value:textView.font range:NSMakeRange(0, attachmentString.length)]; + NSMutableAttributedString *attachmentString = (mediaAttachment.mediaMIMEType == ATLMIMETypeTextPlain) ? [[NSAttributedString alloc] initWithString:mediaAttachment.textRepresentation] : [[NSAttributedString attributedStringWithAttachment:mediaAttachment] mutableCopy]; [attributedString appendAttributedString:attachmentString]; - [attributedString appendAttributedString:lineBreak]; - + if (endLineBreak) { + [attributedString appendAttributedString:lineBreak]; + } + [attributedString addAttribute:NSFontAttributeName value:textView.font range:NSMakeRange(0, attributedString.length)]; + if (textView.textColor) { + [attributedString addAttribute:NSForegroundColorAttributeName value:textView.textColor range:NSMakeRange(0, attributedString.length)]; + } textView.attributedText = attributedString; if ([self.inputToolBarDelegate respondsToSelector:@selector(messageInputToolbarDidType:)]) { [self.inputToolBarDelegate messageInputToolbarDidType:self]; @@ -193,6 +229,36 @@ - (NSArray *)mediaAttachments return _mediaAttachments; } +- (void)setLeftAccessoryImage:(UIImage *)leftAccessoryImage +{ + _leftAccessoryImage = leftAccessoryImage; + [self.leftAccessoryButton setImage:leftAccessoryImage forState:UIControlStateNormal]; +} + +- (void)setRightAccessoryImage:(UIImage *)rightAccessoryImage +{ + _rightAccessoryImage = rightAccessoryImage; + [self.rightAccessoryButton setImage:rightAccessoryImage forState:UIControlStateNormal]; +} + +- (void)setRightAccessoryButtonActiveColor:(UIColor *)rightAccessoryButtonActiveColor +{ + _rightAccessoryButtonActiveColor = rightAccessoryButtonActiveColor; + 
[self.rightAccessoryButton setTitleColor:rightAccessoryButtonActiveColor forState:UIControlStateNormal]; +} + +- (void)setRightAccessoryButtonDisabledColor:(UIColor *)rightAccessoryButtonDisabledColor +{ + _rightAccessoryButtonDisabledColor = rightAccessoryButtonDisabledColor; + [self.rightAccessoryButton setTitleColor:rightAccessoryButtonDisabledColor forState:UIControlStateDisabled]; +} + +- (void)setRightAccessoryButtonFont:(UIFont *)rightAccessoryButtonFont +{ + _rightAccessoryButtonFont = rightAccessoryButtonFont; + [self.rightAccessoryButton.titleLabel setFont:rightAccessoryButtonFont]; +} + #pragma mark - Actions - (void)leftAccessoryButtonTapped @@ -297,19 +363,43 @@ - (void)acceptAutoCorrectionSuggestion - (void)configureRightAccessoryButtonState { if (self.textInputView.text.length) { - self.rightAccessoryButton.accessibilityLabel = ATLMessageInputToolbarSendButton; - [self.rightAccessoryButton setImage:nil forState:UIControlStateNormal]; - self.rightAccessoryButton.contentEdgeInsets = UIEdgeInsetsMake(2, 0, 0, 0); - self.rightAccessoryButton.titleLabel.font = [UIFont boldSystemFontOfSize:17]; - [self.rightAccessoryButton setTitle:@"Send" forState:UIControlStateNormal]; - [self.rightAccessoryButton setTitleColor:[UIColor grayColor] forState:UIControlStateDisabled]; - [self.rightAccessoryButton setTitleColor:ATLBlueColor() forState:UIControlStateNormal]; + [self configureRightAccessoryButtonForText]; + self.rightAccessoryButton.enabled = YES; + } else { + if (self.displaysRightAccessoryImage) { + [self configureRightAccessoryButtonForImage]; + self.rightAccessoryButton.enabled = YES; + } else { + [self configureRightAccessoryButtonForText]; + self.rightAccessoryButton.enabled = NO; + } + } +} + +- (void)configureRightAccessoryButtonForText +{ + self.rightAccessoryButton.accessibilityLabel = ATLMessageInputToolbarSendButton; + [self.rightAccessoryButton setImage:nil forState:UIControlStateNormal]; + self.rightAccessoryButton.contentEdgeInsets = 
UIEdgeInsetsMake(2, 0, 0, 0); + self.rightAccessoryButton.titleLabel.font = self.rightAccessoryButtonFont; + [self.rightAccessoryButton setTitle:ATLLocalizedString(@"atl.messagetoolbar.send.key", @"Send", nil) forState:UIControlStateNormal]; + [self.rightAccessoryButton setTitleColor:self.rightAccessoryButtonActiveColor forState:UIControlStateNormal]; + [self.rightAccessoryButton setTitleColor:self.rightAccessoryButtonDisabledColor forState:UIControlStateDisabled]; + if (!self.displaysRightAccessoryImage && !self.textInputView.text.length) { + self.rightAccessoryButton.enabled = NO; } else { - self.rightAccessoryButton.accessibilityLabel = ATLMessageInputToolbarLocationButton; - [self.rightAccessoryButton setTitle:nil forState:UIControlStateNormal]; - self.rightAccessoryButton.contentEdgeInsets = UIEdgeInsetsZero; - [self.rightAccessoryButton setImage:[UIImage imageNamed:@"AtlasResource.bundle/location_dark"] forState:UIControlStateNormal]; + self.rightAccessoryButton.enabled = YES; } } +- (void)configureRightAccessoryButtonForImage +{ + self.rightAccessoryButton.enabled = YES; + self.rightAccessoryButton.accessibilityLabel = ATLMessageInputToolbarLocationButton; + self.rightAccessoryButton.contentEdgeInsets = UIEdgeInsetsZero; + [self.rightAccessoryButton setTitle:nil forState:UIControlStateNormal]; + [self.rightAccessoryButton setImage:self.rightAccessoryImage forState:UIControlStateNormal]; +} + + @end diff --git a/Code/Views/ATLOutgoingMessageCollectionViewCell.m b/Code/Views/ATLOutgoingMessageCollectionViewCell.m index 81c38c9b1..cce6a5959 100644 --- a/Code/Views/ATLOutgoingMessageCollectionViewCell.m +++ b/Code/Views/ATLOutgoingMessageCollectionViewCell.m @@ -20,10 +20,18 @@ #import "ATLOutgoingMessageCollectionViewCell.h" -@implementation ATLOutgoingMessageCollectionViewCell - NSString *const ATLOutgoingMessageCellIdentifier = @"ATLOutgoingMessageCellIdentifier"; +@interface ATLMessageCollectionViewCell() + +@property (nonatomic) NSLayoutConstraint 
*bubbleWithAvatarLeadConstraint; +@property (nonatomic) NSLayoutConstraint *bubbleWithoutAvatarLeadConstraint; + +@end + + +@implementation ATLOutgoingMessageCollectionViewCell + + (void)initialize { ATLOutgoingMessageCollectionViewCell *proxy = [self appearance]; @@ -52,8 +60,15 @@ - (id)initWithCoder:(NSCoder *)aDecoder - (void)lyr_outgoingCommonInit { - self.avatarImageView.hidden = YES; - [self.contentView addConstraint:[NSLayoutConstraint constraintWithItem:self.bubbleView attribute:NSLayoutAttributeRight relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeRight multiplier:1.0 constant:-ATLMessageCellHorizontalMargin]]; + [self configureConstraintsForOutgoingMessage]; +} + +- (void)configureConstraintsForOutgoingMessage +{ + [self.contentView addConstraint:[NSLayoutConstraint constraintWithItem:self.avatarImageView attribute:NSLayoutAttributeRight relatedBy:NSLayoutRelationEqual toItem:self.contentView attribute:NSLayoutAttributeRight multiplier:1.0 constant:-ATLAvatarImageLeadPadding]]; + self.bubbleWithAvatarLeadConstraint = [NSLayoutConstraint constraintWithItem:self.avatarImageView attribute:NSLayoutAttributeLeft relatedBy:NSLayoutRelationEqual toItem: self.bubbleView attribute: NSLayoutAttributeRight multiplier:1.0 constant:ATLAvatarImageTailPadding]; + [self.contentView addConstraint:self.bubbleWithAvatarLeadConstraint]; + self.bubbleWithoutAvatarLeadConstraint = [NSLayoutConstraint constraintWithItem:self.bubbleView attribute:NSLayoutAttributeRight relatedBy:NSLayoutRelationEqual toItem: self.contentView attribute:NSLayoutAttributeRight multiplier:1.0 constant:-ATLMessageCellHorizontalMargin]; } @end diff --git a/Code/Views/ATLParticipantTableViewCell.m b/Code/Views/ATLParticipantTableViewCell.m index 3ab55a7f4..b5cedc236 100644 --- a/Code/Views/ATLParticipantTableViewCell.m +++ b/Code/Views/ATLParticipantTableViewCell.m @@ -78,6 +78,7 @@ - (void)prepareForReuse { [super prepareForReuse]; self.accessoryView = nil; + 
[self.avatarImageView resetView]; } - (void)setHighlighted:(BOOL)highlighted animated:(BOOL)animated diff --git a/Code/Views/ATLPlayView.h b/Code/Views/ATLPlayView.h new file mode 100644 index 000000000..7196ec028 --- /dev/null +++ b/Code/Views/ATLPlayView.h @@ -0,0 +1,29 @@ +// +// ATLPlayView.h +// Atlas +// +// Created by Dinesh Kakumani on 7/21/15. +// Copyright (c) 2015 Layer, Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +#import + +/** + @abstract A custom view that displays a circular play button indicating that + the content below it is playable. + */ +@interface ATLPlayView : UIView + +@end diff --git a/Code/Views/ATLPlayView.m b/Code/Views/ATLPlayView.m new file mode 100644 index 000000000..e3e166e6b --- /dev/null +++ b/Code/Views/ATLPlayView.m @@ -0,0 +1,117 @@ +// +// ATLPlayView.m +// Atlas +// +// Created by Dinesh Kakumani on 7/21/15. +// Copyright (c) 2015 Layer, Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// + +#import "ATLPlayView.h" +#import "ATLMessagingUtilities.h" + +@interface ATLPlayView () + +@property (nonatomic) CAShapeLayer *circleLayer; +@property (nonatomic) CAShapeLayer *iconLayer; +@property (nonatomic) UIColor *defaultBackgroundRingColor; +@property (nonatomic) UIColor *defaultForegroundRingColor; +@property (nonatomic, readonly) CGFloat radius; + +@end + +@implementation ATLPlayView + +- (id)initWithFrame:(CGRect)frame +{ + self = [super initWithFrame:frame]; + if (self) { + [self lyr_commonInit]; + } + return self; +} + +- (id)initWithCoder:(NSCoder *)aDecoder +{ + self = [super initWithCoder:aDecoder]; + if (self) { + [self lyr_commonInit]; + } + return self; +} + +- (void)lyr_commonInit +{ + _circleLayer = [CAShapeLayer layer]; + _circleLayer.fillRule = @"even-odd"; + _iconLayer = [CAShapeLayer layer]; + [self.layer addSublayer:_circleLayer]; + [self.layer addSublayer:_iconLayer]; + _defaultBackgroundRingColor = [UIColor colorWithWhite:0.8f alpha:0.5f]; + _defaultForegroundRingColor = [UIColor colorWithWhite:0.2f alpha:0.8f]; +} + +- (void)layoutSubviews +{ + [super layoutSubviews]; + CGRect bounds = self.bounds; + + // Draw the triangle + CGPoint center = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds)); + UIBezierPath *triangle = [UIBezierPath bezierPath]; + [triangle moveToPoint:CGPointMake(center.x, center.y - self.radius / 2)]; + [triangle addLineToPoint:CGPointMake(center.x + self.radius / 2, center.y + self.radius / 2)]; + [triangle addLineToPoint:CGPointMake(center.x - self.radius / 2, center.y + self.radius / 2)]; + [triangle applyTransform:CGAffineTransformMakeScale(0.5f, 0.4f)]; + [triangle applyTransform:CGAffineTransformMakeRotation(ATLDegreeToRadians(90))]; + [triangle applyTransform:CGAffineTransformMakeTranslation(self.radius - self.radius / 4, self.radius / 4)]; + + // Draw the circle + UIBezierPath *arc = 
[UIBezierPath bezierPathWithArcCenter:center + radius:self.radius / 2 + startAngle:ATLDegreeToRadians(0 - 90) + endAngle:ATLDegreeToRadians(360 - 90) + clockwise:YES]; + + UIBezierPath *clippedPath = [UIBezierPath bezierPath]; + clippedPath.usesEvenOddFillRule = YES; + [clippedPath appendPath:arc]; + [clippedPath appendPath:triangle]; + + _circleLayer.frame = self.bounds; + _circleLayer.path = clippedPath.CGPath; + _circleLayer.anchorPoint = CGPointMake(0.5, 0.5); + _circleLayer.fillColor = _defaultBackgroundRingColor.CGColor; + _circleLayer.position = CGPointMake(self.layer.frame.size.width / 2, self.layer.frame.size.height / 2); + _circleLayer.strokeEnd = 1.0f; + + _iconLayer.frame = self.bounds; + _iconLayer.path = triangle.CGPath; + _iconLayer.anchorPoint = CGPointMake(0.5, 0.5); + _iconLayer.fillColor = _defaultForegroundRingColor.CGColor; + _iconLayer.position = CGPointMake(self.layer.frame.size.width / 2, self.layer.frame.size.height / 2); + _iconLayer.strokeEnd = 1.0f; +} + +- (CGFloat)radius +{ + return MIN(self.bounds.size.width, self.bounds.size.height); +} + +- (BOOL)isUserInteractionEnabled +{ + return NO; +} + +@end diff --git a/Code/Views/ATLProgressView.h b/Code/Views/ATLProgressView.h index d62736d0f..edf3f7856 100644 --- a/Code/Views/ATLProgressView.h +++ b/Code/Views/ATLProgressView.h @@ -22,12 +22,12 @@ /** @abstract A custom progress view that displays the progress in a circular - shape. It includes two visual components: a subtle transparent background - ring as a placeholder and a foreground ring representing the current - progress state. + shape. It includes two visual components: a subtle transparent background + ring as a placeholder and a foreground ring representing the current + progress state. - Progress changes can be animated using the `setProgress:animated:` method - where a `YES` value has to be passed as the `animated` argument. 
+ Progress changes can be animated using the `setProgress:animated:` method + where a `YES` value has to be passed as the `animated` argument. */ @interface ATLProgressView : UIView @@ -40,7 +40,8 @@ /** @abstract Sets the `progress` float value. @param newProgress The value the progress will be set to. - @param animated Pass `YES` to animate the progress change, or `NO` to do an immediate update. + @param animated Pass `YES` to animate the progress change, or `NO` to do + an immediate update. */ - (void)setProgress:(float)newProgress animated:(BOOL)animated; diff --git a/Code/Views/ATLProgressView.m b/Code/Views/ATLProgressView.m index 144d67de3..328f0597c 100644 --- a/Code/Views/ATLProgressView.m +++ b/Code/Views/ATLProgressView.m @@ -19,22 +19,17 @@ // #import "ATLProgressView.h" +#import "ATLMessagingUtilities.h" static NSTimeInterval const ATLProgressViewDefaultBorderWidth = 8.00f; static NSTimeInterval const ATLProgressViewDefaultTimeInterval = 0.25f; -float ATLDegreeToRadians(float degrees) -{ - return ((M_PI * degrees)/ 180); -} - @interface ATLProgressView () @property (nonatomic) CAShapeLayer *backRingLayer; @property (nonatomic) CAShapeLayer *progressRingLayer; @property (nonatomic) UIColor *defaultBackgroundRingColor; @property (nonatomic) UIColor *defaultForegroundRingColor; -@property (nonatomic) UIBezierPath *progressArcPath; @property (nonatomic) CGFloat borderWidth; @property (nonatomic, readonly) CGFloat radius; @property (nonatomic) NSTimeInterval animationDuration; diff --git a/Examples/ATLSampleConversationAvatarItem.m b/Examples/ATLSampleConversationAvatarItem.m index 83da736dd..52a490598 100644 --- a/Examples/ATLSampleConversationAvatarItem.m +++ b/Examples/ATLSampleConversationAvatarItem.m @@ -21,14 +21,19 @@ @implementation ATLSampleConversationAvatarItem -- (NSString *)avatarItemFullName +- (UIImage *)avatarImage { - return @"Kevin Coleman"; + return nil; } -- (UIImage *)avatarItemImage +- (NSString *)avatarInitials { - return nil; + 
return @"KC"; +} + +- (NSURL *)avatarImageURL +{ + return [NSURL URLWithString:@"http://lorempixel.com/400/200/"]; } @end diff --git a/Examples/ATLSampleConversationListViewController.h b/Examples/ATLSampleConversationListViewController.h index ad5358dbe..15e449471 100644 --- a/Examples/ATLSampleConversationListViewController.h +++ b/Examples/ATLSampleConversationListViewController.h @@ -21,6 +21,8 @@ #import #import +extern NSString *const ATLConversationTableViewAccessibilityIdentifier; + @interface ATLSampleConversationListViewController : ATLConversationListViewController @end diff --git a/Examples/ATLSampleConversationListViewController.m b/Examples/ATLSampleConversationListViewController.m index 1eb394035..c4c8c91e4 100644 --- a/Examples/ATLSampleConversationListViewController.m +++ b/Examples/ATLSampleConversationListViewController.m @@ -30,8 +30,6 @@ @interface ATLSampleConversationListViewController () @property (nonatomic, readonly) NSURL *identifier LYR_QUERYABLE_PROPERTY; -@property (nonatomic) NSUInteger index LYR_QUERYABLE_PROPERTY; -@property (nonatomic) LYRConversationMock *conversation LYR_QUERYABLE_PROPERTY; +@property (nonatomic) NSUInteger position LYR_QUERYABLE_PROPERTY; +@property (nonatomic, weak) LYRConversationMock *conversation LYR_QUERYABLE_PROPERTY; @property (nonatomic) NSArray *parts; @property (nonatomic, readonly) BOOL isSent LYR_QUERYABLE_PROPERTY; @property (nonatomic, readonly) BOOL isDeleted; @property (nonatomic, readonly) BOOL isUnread LYR_QUERYABLE_PROPERTY; @property (nonatomic) NSDate *sentAt LYR_QUERYABLE_PROPERTY; @property (nonatomic) NSDate *receivedAt LYR_QUERYABLE_PROPERTY; -@property (nonatomic, readonly) NSString *sentByUserID LYR_QUERYABLE_PROPERTY; +@property (nonatomic, readonly) LYRActorMock *sender; @property (nonatomic) NSDictionary *recipientStatusByUserID; + (instancetype)newMessageWithParts:(NSArray *)messageParts senderID:(NSString *)senderID; ++ (instancetype)newMessageWithParts:(NSArray *)messageParts 
senderName:(NSString *)senderName; + - (BOOL)markAsRead:(NSError **)error; - (BOOL)delete:(LYRDeletionMode)deletionMode error:(NSError **)error; diff --git a/Examples/Mocks/LYRMessageMock.m b/Examples/Mocks/LYRMessageMock.m index 8c6b7c103..f3de8a51d 100644 --- a/Examples/Mocks/LYRMessageMock.m +++ b/Examples/Mocks/LYRMessageMock.m @@ -20,13 +20,16 @@ #import "LYRMessageMock.h" #import "LYRMockContentStore.h" +@implementation LYRActorMock +@end + @interface LYRMessageMock () @property (nonatomic, readwrite) NSURL *identifier; @property (nonatomic, readwrite) BOOL isSent; @property (nonatomic, readwrite) BOOL isDeleted; @property (nonatomic, readwrite) BOOL isUnread; -@property (nonatomic, readwrite) NSString *sentByUserID; +@property (nonatomic, readwrite) LYRActorMock *sender; @end @@ -37,11 +40,23 @@ - (id)initWithMessageParts:(NSArray *)messageParts senderID:(NSString *)senderID self = [super init]; if (self) { _parts = messageParts; - _sentByUserID = senderID; + _sender = [LYRActorMock new]; + _sender.userID = senderID; } return self; } +- (id)initWithMessageParts:(NSArray *)messageParts senderName:(NSString *)senderName +{ + self = [super init]; + if (self) { + _parts = messageParts; + _sender = [LYRActorMock new]; + _sender.name = senderName; + } + return self; +} + + (instancetype)newMessageWithParts:(NSArray *)messageParts senderID:(NSString *)senderID { LYRMessageMock *mock = [[self alloc] initWithMessageParts:messageParts senderID:senderID]; @@ -52,6 +67,16 @@ + (instancetype)newMessageWithParts:(NSArray *)messageParts senderID:(NSString * return mock; } ++ (instancetype)newMessageWithParts:(NSArray *)messageParts senderName:(NSString *)senderName +{ + LYRMessageMock *mock = [[self alloc] initWithMessageParts:messageParts senderName:senderName]; + mock.identifier = [NSURL URLWithString:[[NSUUID UUID] UUIDString]]; + mock.isSent = NO; + mock.isDeleted = NO; + mock.isUnread = YES; + return mock; +} + - (BOOL)markAsRead:(NSError **)error { self.isUnread = 
NO; diff --git a/Examples/Mocks/LYRMockContentStore.m b/Examples/Mocks/LYRMockContentStore.m index ed3f048c1..357b41b46 100644 --- a/Examples/Mocks/LYRMockContentStore.m +++ b/Examples/Mocks/LYRMockContentStore.m @@ -193,10 +193,10 @@ - (LYRMessageMock *)messageForIdentifier:(NSURL *)identifier - (void)reindexMessagesForConversation:(LYRConversationMock *)conversation { - LYRPredicate *predicate = [LYRPredicate predicateWithProperty:@"conversation" operator:LYRPredicateOperatorIsEqualTo value:conversation]; + LYRPredicate *predicate = [LYRPredicate predicateWithProperty:@"conversation" predicateOperator:LYRPredicateOperatorIsEqualTo value:conversation]; NSOrderedSet *message = [self fetchObjectsWithClass:[LYRMessage class] predicate:predicate sortDescriptior:@[[NSSortDescriptor sortDescriptorWithKey:@"index" ascending:YES]]]; [message enumerateObjectsWithOptions:NSEnumerationConcurrent usingBlock:^(id obj, NSUInteger idx, BOOL *stop) { - [(LYRMessageMock *)obj setIndex:idx]; + [(LYRMessageMock *)obj setPosition:idx]; }]; } @@ -212,8 +212,12 @@ - (NSOrderedSet *)fetchObjectsWithClass:(Class)objectClass predicate:(LYRPredica if ([objectClass isSubclassOfClass:[LYRConversation class]]) { NSOrderedSet *filteredSet; if (predicate) { - NSPredicate *conversationPredicate = [self constructPredicateForMockPredicate:predicate]; - filteredSet = [[NSOrderedSet alloc] initWithSet:[self.conversations filteredSetUsingPredicate:conversationPredicate]]; + if ([predicate isKindOfClass:[LYRCompoundPredicate class]]) { + filteredSet = [[NSOrderedSet alloc] initWithSet:self.conversations]; + } else { + NSPredicate *conversationPredicate = [self constructPredicateForMockPredicate:predicate]; + filteredSet = [[NSOrderedSet alloc] initWithSet:[self.conversations filteredSetUsingPredicate:conversationPredicate]]; + } } else { filteredSet = [[NSOrderedSet alloc] initWithSet:self.conversations]; } @@ -222,8 +226,12 @@ - (NSOrderedSet *)fetchObjectsWithClass:(Class)objectClass 
predicate:(LYRPredica } else if ([objectClass isSubclassOfClass:[LYRMessage class]]) { NSOrderedSet *filteredSet; if (predicate) { - NSPredicate *messagePredicate = [self constructPredicateForMockPredicate:predicate]; - filteredSet = [[NSOrderedSet alloc] initWithSet:[self.messages filteredSetUsingPredicate:messagePredicate]]; + if ([predicate isKindOfClass:[LYRCompoundPredicate class]]) { + filteredSet = [[NSOrderedSet alloc] initWithSet:self.messages]; + } else { + NSPredicate *messagePredicate = [self constructPredicateForMockPredicate:predicate]; + filteredSet = [[NSOrderedSet alloc] initWithSet:[self.messages filteredSetUsingPredicate:messagePredicate]]; + } } else { filteredSet = [[NSOrderedSet alloc] initWithSet:self.messages]; } @@ -255,7 +263,10 @@ - (NSPredicate *)constructPredicateForMockPredicate:(LYRPredicate *)predicate return [NSPredicate predicateWithFormat:@"SELF.%@ <= %@", predicate.property, predicate.value]; case LYRPredicateOperatorIsIn: { - NSPredicate *predicatee = [NSPredicate predicateWithFormat:@"%@ IN SELF.%@", predicate.value, predicate.property]; + if ([predicate.value isKindOfClass:[NSSet class]]) { + return [NSPredicate predicateWithFormat:@"ANY SELF.%K IN %@", predicate.property, predicate.value]; + } + NSPredicate *predicatee = [NSPredicate predicateWithFormat:@"SELF.%@ CONTAINS %@ ", predicate.property, predicate.value]; return predicatee; } case LYRPredicateOperatorIsNotIn: @@ -269,7 +280,7 @@ - (NSPredicate *)constructPredicateForMockPredicate:(LYRPredicate *)predicate - (void)broadcastChanges { - if (self.shouldBroadcastChanges) { + if (self.shouldBroadcastChanges && self.mockObjectChanges.count) { [[NSNotificationCenter defaultCenter] postNotificationName:LYRMockObjectsDidChangeNotification object:self.mockObjectChanges]; } [self.mockObjectChanges removeAllObjects]; diff --git a/Examples/Mocks/LYRQueryControllerMock.h b/Examples/Mocks/LYRQueryControllerMock.h index 64b2dfe0e..6aaceb9e1 100644 --- 
a/Examples/Mocks/LYRQueryControllerMock.h +++ b/Examples/Mocks/LYRQueryControllerMock.h @@ -57,6 +57,8 @@ - (BOOL)execute:(NSError **)error; +- (NSDictionary *)indexPathsForObjectsWithIdentifiers:(NSSet *)objectIdentifiers; + @end diff --git a/Examples/Mocks/LYRQueryControllerMock.m b/Examples/Mocks/LYRQueryControllerMock.m index e61b38d60..5e2f563af 100644 --- a/Examples/Mocks/LYRQueryControllerMock.m +++ b/Examples/Mocks/LYRQueryControllerMock.m @@ -138,6 +138,12 @@ - (void)setUpdatableProperties:(NSSet *)updatableProperties _updatableProperties = updatableProperties; } +- (NSDictionary *)indexPathsForObjectsWithIdentifiers:(NSSet *)objectIdentifiers; +{ + NSUInteger maxIndex = [[LYRMockContentStore sharedStore] allMessages].count - 1; + return [[NSDictionary alloc] initWithObjects:@[[NSIndexPath indexPathForRow:0 inSection:maxIndex]] forKeys:@[self.layerClient.authenticatedUserID]]; +} + - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; diff --git a/Examples/Programmatic/Images.xcassets/LaunchImage.launchimage/Contents.json b/Examples/Programmatic/Images.xcassets/LaunchImage.launchimage/Contents.json new file mode 100644 index 000000000..e37b64949 --- /dev/null +++ b/Examples/Programmatic/Images.xcassets/LaunchImage.launchimage/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "orientation" : "portrait", + "idiom" : "iphone", + "minimum-system-version" : "7.0", + "scale" : "2x" + }, + { + "orientation" : "portrait", + "idiom" : "iphone", + "minimum-system-version" : "7.0", + "subtype" : "retina4", + "scale" : "2x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/Examples/Programmatic/ProgrammaticAppDelegate.m b/Examples/Programmatic/ProgrammaticAppDelegate.m index 3d839eb09..0b17a5547 100644 --- a/Examples/Programmatic/ProgrammaticAppDelegate.m +++ b/Examples/Programmatic/ProgrammaticAppDelegate.m @@ -11,6 +11,11 @@ #import "LayerKitMock.h" #import +static BOOL 
ATLIsRunningTests() +{ + return (NSClassFromString(@"XCTestCase") || [[[NSProcessInfo processInfo] environment] valueForKey:@"XCInjectBundle"]); +} + @interface ProgrammaticAppDelegate () @end @@ -23,8 +28,13 @@ - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:( LYRClientMock *layerClient = [LYRClientMock layerClientMockWithAuthenticatedUserID:mockUser.participantIdentifier]; [[LYRMockContentStore sharedStore] hydrateConversationsForAuthenticatedUserID:layerClient.authenticatedUserID count:1]; - ATLSampleConversationListViewController *controller = [ATLSampleConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)layerClient]; - controller.view.backgroundColor = [UIColor whiteColor]; + UIViewController *controller; + if (ATLIsRunningTests()) { + controller = [UIViewController new]; + } else { + controller = [ATLSampleConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)layerClient]; + controller.view.backgroundColor = [UIColor whiteColor]; + } UINavigationController *rootViewController = [[UINavigationController alloc] initWithRootViewController:controller]; self.window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds]; diff --git a/Examples/Storyboard/Atlas.storyboard b/Examples/Storyboard/Atlas.storyboard index 4ed7ea095..ba620c566 100644 --- a/Examples/Storyboard/Atlas.storyboard +++ b/Examples/Storyboard/Atlas.storyboard @@ -1,7 +1,8 @@ - + - + + diff --git a/Gemfile b/Gemfile index fc134e47d..53294353f 100644 --- a/Gemfile +++ b/Gemfile @@ -1,11 +1,10 @@ source "http://rubygems.org" ruby '2.1.2' -gem "rake", "~> 10.4.2" -gem 'cocoapods', '= 0.35.0' +gem "rake", ">= 10.3.2" +gem 'cocoapods', '~> 0.36.0' gem 'xcpretty', '~> 0.1.7' gem 'xctasks', '~> 0.2.2' gem 'plist', '~> 3.1.0' gem 'shenzhen', '0.10.2' gem 'byebug' - diff --git a/Gemfile.lock b/Gemfile.lock index 90473e8bc..34453dafa 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -16,29 +16,29 @@ 
GEM columnize (~> 0.8) debugger-linecache (~> 1.2) slop (~> 3.6) - claide (0.7.0) - cocoapods (0.35.0) + claide (0.8.1) + cocoapods (0.36.0) activesupport (>= 3.2.15) - claide (~> 0.7.0) - cocoapods-core (= 0.35.0) - cocoapods-downloader (~> 0.8.0) - cocoapods-plugins (~> 0.3.1) - cocoapods-trunk (~> 0.4.1) - cocoapods-try (~> 0.4.2) + claide (~> 0.8.1) + cocoapods-core (= 0.36.0) + cocoapods-downloader (~> 0.8.1) + cocoapods-plugins (~> 0.4.1) + cocoapods-trunk (~> 0.6.0) + cocoapods-try (~> 0.4.3) colored (~> 1.2) escape (~> 0.0.4) - molinillo (~> 0.1.2) + molinillo (~> 0.2.1) nap (~> 0.8) open4 (~> 1.3) - xcodeproj (~> 0.20.2) - cocoapods-core (0.35.0) + xcodeproj (~> 0.23.0) + cocoapods-core (0.36.0) activesupport (>= 3.2.15) fuzzy_match (~> 2.0.4) nap (~> 0.8.0) cocoapods-downloader (0.8.1) - cocoapods-plugins (0.3.2) + cocoapods-plugins (0.4.1) nap - cocoapods-trunk (0.4.1) + cocoapods-trunk (0.6.0) nap (>= 0.8) netrc (= 0.7.8) cocoapods-try (0.4.3) @@ -61,7 +61,7 @@ GEM json (1.8.2) mini_portile (0.6.2) minitest (5.5.1) - molinillo (0.1.2) + molinillo (0.2.1) multipart-post (1.2.0) nap (0.8.0) net-sftp (2.1.2) @@ -89,10 +89,10 @@ GEM terminal-table (~> 1.4.5) slop (3.6.0) terminal-table (1.4.5) - thread_safe (0.3.4) + thread_safe (0.3.5) tzinfo (1.2.2) thread_safe (~> 0.1) - xcodeproj (0.20.2) + xcodeproj (0.23.0) activesupport (>= 3) colored (~> 1.2) xcpretty (0.1.7) @@ -103,9 +103,9 @@ PLATFORMS DEPENDENCIES byebug - cocoapods (= 0.35.0) + cocoapods (~> 0.36.0) plist (~> 3.1.0) - rake (~> 10.4.2) + rake (>= 10.3.2) shenzhen (= 0.10.2) xcpretty (~> 0.1.7) xctasks (~> 0.2.2) diff --git a/Podfile.lock b/Podfile.lock index 3281108e8..bc88235bd 100644 --- a/Podfile.lock +++ b/Podfile.lock @@ -1,15 +1,15 @@ PODS: - - Atlas (1.0.0): - - LayerKit - - Expecta (0.3.2) - - KIF (3.1.2): - - KIF/XCTest (= 3.1.2) - - KIF/XCTest (3.1.2) + - Atlas (1.0.11): + - LayerKit (>= 0.16.0) + - Expecta (1.0.2) + - KIF (3.2.3): + - KIF/XCTest (= 3.2.3) + - KIF/XCTest (3.2.3) - 
KIFViewControllerActions (1.0.0): - KIF (>= 2.0.0) - - LayerKit (0.10.1) + - LayerKit (0.16.0) - LYRCountDownLatch (0.9.0) - - OCMock (3.1.2) + - OCMock (3.1.5) DEPENDENCIES: - Atlas (from `.`) @@ -22,7 +22,7 @@ DEPENDENCIES: EXTERNAL SOURCES: Atlas: - :path: . + :path: "." KIFViewControllerActions: :git: https://github.com/blakewatters/KIFViewControllerActions.git LYRCountDownLatch: @@ -30,19 +30,19 @@ EXTERNAL SOURCES: CHECKOUT OPTIONS: KIFViewControllerActions: - :commit: 41edeeade3ea0618c2ddac6c90a648f322f1b5a2 + :commit: fbcaaaf2a6236c6ed840ce011a44f7e3e1f7570d :git: https://github.com/blakewatters/KIFViewControllerActions.git LYRCountDownLatch: :commit: 02119f855ad14e7fc0dae32bbbf17c735a281cfe :git: https://github.com/layerhq/LYRCountDownLatch.git SPEC CHECKSUMS: - Atlas: acc11f35e860eabda2f0fc0f5ca5e6629f40870b - Expecta: ee641011fe10aa1855d487b40e4976dac50ec342 - KIF: 068074b24dd455025e7f2a488ba4fb07b9ce047b + Atlas: 9bb6dccf270236d591b36882288ecf185b464f82 + Expecta: 54e8a3530add08f4f0208c111355eda7cde74a53 + KIF: a94bffe9c97e449e44f8fa481c53243d21309e1e KIFViewControllerActions: 73085acd975ebbfc954f7895ca1aaa9faa36b3c6 - LayerKit: 5ee212e2922750ddef4ff6ddd4f1b986c0b85fb6 - LYRCountDownLatch: 72a444f729ca5a8c6157c0b58f335a44e6702b48 - OCMock: ecdd510b73ef397f2f97274785c1e87fd147c49f + LayerKit: 8976a2a4d5f73b808e6328a5c66732605a3c247a + LYRCountDownLatch: 9b440b42a19ddbf4e75bdd4b43726baa1527606a + OCMock: 4c2925291f80407c3738dd1db14d21d0cc278864 -COCOAPODS: 0.36.0.rc.1 +COCOAPODS: 0.38.2 diff --git a/README.md b/README.md index aa18f51fd..6848b97fa 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ ![Atlas Header](Assets/atlas-github-header.png) -[![Build Status](http://img.shields.io/travis/layerhq/Atlas-iOS/development.svg?style=flat)](https://travis-ci.org/layerhq/Atlas-iOS) +[![Build Status](http://img.shields.io/travis/layerhq/Atlas-iOS.svg?style=flat)](https://travis-ci.org/layerhq/Atlas-iOS) [![Pod 
Version](http://img.shields.io/cocoapods/v/Atlas.svg?style=flat)](http://cocoadocs.org/docsets/Atlas/) [![Pod Platform](http://img.shields.io/cocoapods/p/Atlas.svg?style=flat)](http://cocoadocs.org/docsets/Atlas/) [![Pod License](http://img.shields.io/cocoapods/l/Atlas.svg?style=flat)](https://www.apache.org/licenses/LICENSE-2.0.html) @@ -11,7 +11,7 @@ Atlas is a lightweight, flexible set of user interface components designed to en ## Overview -Atlas provides several complete user interface experiences as well as a large library of individual views. It was designed to address three use cases simulatenously: +Atlas provides several complete user interface experiences as well as a large library of individual views. It was designed to address three use cases simultaneously: 1. Provide good looking, high quality implementations of familiar messaging experiences out of the box. By default Atlas is styled to look much like iMessage. 2. Enable quick and easy branding of the user experience via integration with `UIAppearance` and Interface Builder. All fonts, colors, etc. can be customized via an extensive set of `UIAppearance` selectors. This enables developers to quickly add messaging to an existing application and match the components with the existing style. @@ -30,7 +30,7 @@ The table below details the most important classes in Atlas and is hyperlinked d - + @@ -64,11 +64,11 @@ The table below details the most important classes in Atlas and is hyperlinked d - + - + @@ -81,6 +81,8 @@ The table below details the most important classes in Atlas and is hyperlinked d
Controllers
ATLUIAddressBarControllerATLAddressBarViewController A controller that provides a searchable address input experience for selecting users to message.
ATLParticipantA protocol adopted by objects that wish to represent participants in a Converation.A protocol adopted by objects that wish to represent participants in a Conversation.
ATLParticipantPresentingA protocol adopted by objects that wish to represent participants in a Converation.A protocol adopted by objects that wish to represent participants in a Conversation.
Views
+The complete API documentation can be found on [CocoaDocs](http://cocoadocs.org/docsets/Atlas/). + ## Installation Atlas can be installed directly into your application via CocoaPods or by directly importing the source code files. Please note that Atlas has a direct dependency on LayerKit that must be satisfied in order to build the components. @@ -114,13 +116,15 @@ If you wish to install Atlas directly into your application from source, then cl 1. Drag and drop the files from the `Code` and `Resources` directories onto your project, instructing Xcode to copy items into your destination group's folder. 2. Update your project settings to include the linker flags: `-ObjC -lz` -3. Add the following Cocoa SDK frameworks to your project: `'CFNetwork', 'Security', 'MobileCoreServices', 'SystemConfiguration', 'CoreLocation'` +3. Add the following Cocoa SDK frameworks to your project: `'CFNetwork', 'Security', 'MobileCoreServices', 'SystemConfiguration', 'CoreLocation', 'AssetsLibrary', 'ImageIO'` + +**Please note that LayerKit is a dependency of `Atlas`. When manually installing `Atlas`, the same must be done with `LayerKit`. Instructions on doing so can be found in the [LayerKit releases repository](https://github.com/layerhq/releases-ios#framework-installation).** Build and run your project to verify installation was successful. ## Getting Started -1. **Subclass** - Subclass the `ATLConversationViewController` or `ATLConversationListViewController` +1. **Subclass** - Subclass the [ATLConversationViewController](Code/Controllers/ATLConversationViewController.h) or [ATLConversationListViewController](Code/Controllers/ATLConversationListViewController.h) 2. **Implement** - Both controllers declare delegate and data source protocols. Your subclasses must implement these protocols. 3. **Customize** - The Atlas leverages the `UIAppearance` protocol to allow for effortless customization of components. 4. 
**Communicate** - Use the LayerKit SDK and the Atlas to build compelling messaging applications. @@ -138,13 +142,13 @@ NSString *ATLMIMETypeLocation; // location ## Component Details -### ATLConversationListViewController +### [ATLConversationListViewController](Code/Controllers/ATLConversationListViewController.h) -The `ATLConversationListViewController` provides a customizable UITableViewController subclass for displaying a list of Layer conversations. Conversations are represented by a Conversation label, the latest message content, and the latest message date. The controller handles fetching and ordering conversation based on the latest message date. +The [ATLConversationListViewController](Code/Controllers/ATLConversationListViewController.h) provides a customizable `UITableViewController` subclass for displaying a list of Layer conversations. Conversations are represented by a Conversation label, the latest message content, and the latest message date. The controller handles fetching and ordering conversation based on the latest message date. #### Initialization -The `ATLConversationListViewController` is initialized with a LYRClient object. +The [ATLConversationListViewController](Code/Controllers/ATLConversationListViewController.h) is initialized with a LYRClient object. ```objc ATLConversationListViewController *viewController = [ATLConversationListViewController conversationListViewControllerWithLayerClient:layerClient]; @@ -152,26 +156,26 @@ ATLConversationListViewController *viewController = [ATLConversationListViewCont #### Customization -The `ATLConverationListViewController` displays `ATLConversationTableViewCells`. The cells themselves provide for customization via UIAppearance selectors. +The [ATLConversationListViewController](Code/Controllers/ATLConversationListViewController.h) displays [ATLConversationTableViewCell](Code/Views/ATLConversationTableViewCell.h). The cells themselves provide for customization via `UIAppearance` selectors. 
```objc [[ATLConversationTableViewCell appearance] setConversationLabelFont:[UIFont boldSystemFontOfSize:14]]; [[ATLConversationTableViewCell appearance] setConversationLabelColor:[UIColor blackColor]]; ``` -### ATLConversationViewController +### [ATLConversationViewController](Code/Controllers/ATLConversationViewController.h) -The `ATLConversationViewController` provides a customizable `UICollectionViewController` subclass for displaying individual Layer conversations. The controller is initialized with and `LYRClient` object and an `LYRConversation` object. It handles fetching, displaying and sending messages via LayerKit. The controller leverages the `ATLMessageInputToolbar` object to allow for text and content input. +The [ATLConversationViewController](Code/Controllers/ATLConversationViewController.h) provides a customizable `UICollectionViewController` subclass for displaying individual Layer conversations. The controller is initialized with an `LYRClient` object and an `LYRConversation` object. It handles fetching, displaying and sending messages via LayerKit. The controller leverages the [ATLMessageInputToolbar](Code/Views/ATLMessageInputToolbar.h) object to allow for text and content input. #### Initialization ```objc -ATLConverationViewController *viewController = [ATLConversationViewController conversationViewControllerWithConversation:conversation layerClient:self.layerClient]; +ATLConversationViewController *viewController = [ATLConversationViewController conversationViewControllerWithLayerClient:self.layerClient]; ``` #### Customization -The `ATLConverationViewController` displays both incoming and outgoing flavors of `ATLMessageCollectionViewCell`. The cells themselves provide for customization via UIAppearance selectors. +The [ATLConversationViewController](Code/Controllers/ATLConversationViewController.h) displays both incoming and outgoing flavors of [ATLMessageCollectionViewCell](Code/Views/ATLMessageCollectionViewCell.h). 
The cells themselves provide for customization via UIAppearance selectors. ```objc [[ATLOutgoingMessageCollectionViewCell appearance] setMessageTextColor:[UIColor whiteColor]]; @@ -179,47 +183,101 @@ The `ATLConverationViewController` displays both incoming and outgoing flavors o [[ATLOutgoingMessageCollectionViewCell appearance] setBubbleViewColor:[UIColor blueColor]]; ``` -### ATLParticipantPicker +### [ATLParticipantTableViewController](Code/Controllers/ATLParticipantTableViewController.h) -The `ATLParticipantPickerController` provides a `UINavigationController` subclass that displays a list of users conforming to the `ATLParticipant` protocol. The controller provides support for sorting and ordering participants based on either first or last name. The controller also provides multi-selection support and an optional selection indicator. +The [ATLParticipantTableViewController](Code/Controllers/ATLParticipantTableViewController.h) provides a `UINavigationController` subclass that displays a list of users conforming to the [ATLParticipant](Code/Protocols/ATLParticipant.h) protocol. The controller provides support for sorting and ordering participants based on either first or last name. The controller also provides multi-selection support and an optional selection indicator. #### Initialization -The `ATLParticipantPickerController` is initialized with an object conforming to the `ATLParticipantPickerDataSource` and a sortType. +The [ATLParticipantTableViewController](Code/Controllers/ATLParticipantTableViewController.h) is initialized with a participant list and a sortType. 
```objc -ATLParticipantPickerSortType sortType = ATLParticipantPickerControllerSortTypeFirst; -ATLParticipantPickerController *controller = [ATLParticipantPickerController participantPickerWithDataSource:dataSource - sortType:sortType]; +ATLParticipantPickerSortType sortType = ATLParticipantPickerSortTypeFirstName; +ATLParticipantTableViewController *controller = [ATLParticipantTableViewController participantTableViewControllerWithParticipants:participants sortType:sortType]; ``` #### Customization -The `ATLParticipantPickerController` displays `ATLParticipantTableViewCells`. The cells themselves provide for customization via UIAppearance selectors. +The [ATLParticipantTableViewController](Code/Controllers/ATLParticipantTableViewController.h) displays [ATLParticipantTableViewCell](Code/Views/ATLParticipantTableViewCell.h). The cells themselves provide for customization via `UIAppearance` selectors. ```objc [[ATLParticipantTableViewCell appearance] setTitleColor:[UIColor blackColor]]; [[ATLParticipantTableViewCell appearance] setTitleFont:[UIFont systemFontOfSize:14]]; ``` -### ATLMessageInputToolbar +### Presenters + +While the Atlas does provide highly customizable TableView and CollectionView cells, advanced customization of the UI components can be done by implementing custom cells and setting the component's `cellClass` property. The Atlas component CollectionView and TableView Cells share a common Presenter pattern where each cell displayed in a Component conforms to a specific presenter protocol. If you would like to swap out the default cells for cells that you build, this can easily be accomplished via implementing cells that conform to the presenter patterns and setting the `cellClass` property of the controller. -The `ATLMessageInputToolbar` provides a `UIToolbar` subclass that supports text and image input. The toolbar handles auto-resizing itself relative to its content. 
+The presenters are [ATLParticipantPresenting](Code/Protocols/ATLParticipantPresenting.h), [ATLConversationPresenting](Code/Protocols/ATLConversationPresenting.h), and [ATLMessagePresenting](Code/Protocols/ATLMessagePresenting.h). -#### Initialization +## Configuring UI Appearance -The `ATLMessageInputToolbar` is initialized with a `UIViewController` object and sets itself as the inputAccessoryView of the ViewController. In order to do this, the `inputAcccessoryView` property of the view controller must first be re-declared in the implementation file of the view controller class. +Atlas takes advantage of Apple's `UIAppearance` protocol which lets you change UI appearance very easily. The following is a list of all Atlas properties conforming to `UIAppearance`: +### [ATLMessageCollectionViewCell](Code/Views/ATLMessageCollectionViewCell.h) +##### ([ATLOutgoingMessageCollectionViewCell](Code/Views/ATLOutgoingMessageCollectionViewCell.h) and [ATLIncomingMessageCollectionViewCell](Code/Views/ATLIncomingMessageCollectionViewCell.h) extend this class) + +```objc +@property (nonatomic) UIFont *messageTextFont +@property (nonatomic) UIColor *messageTextColor +@property (nonatomic) UIColor *messageLinkTextColor +@property (nonatomic) UIColor *bubbleViewColor +@property (nonatomic) CGFloat bubbleViewCornerRadius ``` -self.inputAccessoryView = [ATLMessageInputToolbar inputToolBarWithViewController:self]; + +### [ATLAddressBarTextView](Code/Views/ATLAddressBarTextView.h) + +```objc +@property (nonatomic) UIFont *addressBarFont +@property (nonatomic) UIColor *addressBarTextColor +@property (nonatomic) UIColor *addressBarHighlightColor ``` -Once initialized, the controller manages resizing itself relative to its content, and animation so that it sticks to the top of the keyboard. 
-### Presenters +### [ATLAvatarImageView](Code/Views/ATLAvatarImageView.h) -While the Atlas does provide highly customizable TableView and CollectionView cells, advanced customization of the UI components can be done by implementing custom cells and setting the component's `cellClass` property. The Atlas component CollectionView and TableView Cells share a common Presenter pattern where each cell displayed in a Component conforms to a specific presenter protocol. If you would like to swap out the default cells for cells that you build, this can easily accomplished via implementing cells that conform to the presenter patterns and setting the `cellClass` property of the controller. +```objc +@property (nonatomic) CGFloat avatarImageViewDiameter +@property (nonatomic) UIFont *initialsFont +@property (nonatomic) UIColor *initialsColor +@property (nonatomic) UIColor *imageViewBackgroundColor +``` -The presenters are `ATLParticipantPresenting`, `ATLConversationPresenting`, and `ATLMessagePresenting`. 
+### [ATLConversationCollectionViewHeader](Code/Views/ATLConversationCollectionViewHeader.h) + +```objc +@property (nonatomic) UIFont *participantLabelFont +@property (nonatomic) UIColor *participantLabelTextColor +``` + +### [ATLConversationTableViewCell](Code/Views/ATLConversationTableViewCell.h) + +```objc +@property (nonatomic) UIFont *conversationTitleLabelFont +@property (nonatomic) UIColor *conversationTitleLabelColor +@property (nonatomic) UIFont *lastMessageLabelFont +@property (nonatomic) UIColor *lastMessageLabelColor +@property (nonatomic) UIFont *dateLabelFont +@property (nonatomic) UIColor *dateLabelColor +@property (nonatomic) UIColor *unreadMessageIndicatorBackgroundColor +@property (nonatomic) UIColor *cellBackgroundColor +``` + +### [ATLParticipantSectionHeaderView](Code/Views/ATLParticipantSectionHeaderView.h) + +```objc +@property (nonatomic) UIFont *sectionHeaderFont +@property (nonatomic) UIColor *sectionHeaderTextColor +@property (nonatomic) UIColor *sectionHeaderBackgroundColor +``` + +### [ATLParticipantTableViewCell](Code/Views/ATLParticipantTableViewCell.h) + +```objc +@property (nonatomic) UIFont *titleFont +@property (nonatomic) UIFont *boldTitleFont +@property (nonatomic) UIColor *titleColor +``` ## Contributing diff --git a/Rakefile b/Rakefile index 71f130558..27c1c46e2 100644 --- a/Rakefile +++ b/Rakefile @@ -55,17 +55,16 @@ task :init do end if defined?(XCTasks) - XCTasks::TestTask.new(test: :sim) do |t| + XCTasks::TestTask.new(:test) do |t| t.workspace = 'Atlas.xcworkspace' t.schemes_dir = 'Tests/Schemes' t.runner = :xcpretty t.output_log = 'xcodebuild.log' - t.subtask(app: 'ProgrammaticTests') do |s| - s.destination do |d| - d.platform = :iossimulator - d.name = 'Atlas-Test-Device' - d.os = :latest - end + t.subtasks = { progammatic: 'ProgrammaticTests'} + t.destination do |d| + d.platform = :iossimulator + d.os = :latest + d.name = 'iPhone 6 Plus' end end end @@ -173,7 +172,7 @@ task :release => [:fetch_origin] do with_clean_env 
do podspec = File.join(root_dir, "Atlas.podspec") puts green("Pushing podspec to CocoaPods trunk") - run "pod trunk push #{podspec}" + run "pod trunk push --use-libraries #{podspec}" end end end diff --git a/Tests/ATLConversationListViewControllerTest.m b/Tests/ATLConversationListViewControllerTest.m index 08a8ca034..54fc8a11e 100644 --- a/Tests/ATLConversationListViewControllerTest.m +++ b/Tests/ATLConversationListViewControllerTest.m @@ -24,6 +24,8 @@ #import "LYRClientMock.h" #import "ATLSampleConversationListViewController.h" +extern NSString *const ATLAvatarImageViewAccessibilityLabel; + @interface ATLConversationListViewController () @property (nonatomic) LYRQueryController *queryController; @@ -50,15 +52,14 @@ - (void)setUp - (void)tearDown { + [super tearDown]; + [tester waitForAnimationsToFinish]; [self.testInterface dismissPresentedViewController]; - self.viewController.queryController = nil; - self.viewController = nil; + if (self.viewController) self.viewController = nil; [[LYRMockContentStore sharedStore] resetContentStore]; [self resetAppearance]; self.testInterface = nil; - - [super tearDown]; } - (void)testToVerifyConversationListBaseUI @@ -148,7 +149,7 @@ - (void)testToVerifyEditingModeAndMultipleConversationDeletionFunctionality [tester tapViewWithAccessibilityLabel:[NSString stringWithFormat:@"Delete %@", mockUser3.fullName]]; [self deleteConversation:conversation3 deletionMode:LYRDeletionModeLocal]; - LYRQuery *query = [LYRQuery queryWithClass:[LYRConversation class]]; + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRConversation class]]; NSError *error; NSOrderedSet *conversations = [self.testInterface.layerClient executeQuery:query error:&error]; expect(error).to.beNil; @@ -222,6 +223,23 @@ -(void)testToVerifyCustomCellClassFunctionality expect([cell class]).toNot.equal([ATLConversationTableViewCell class]); } +//Verify search bar does show up on screen for default `shouldDisplaySearchController` value `YES`. 
+- (void)testToVerifyDefaultShouldDisplaySearchControllerFunctionality +{ + self.viewController = [ATLSampleConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + [self setRootViewController:self.viewController]; + [tester waitForViewWithAccessibilityLabel:@"Search Bar"]; +} + +//Verify search bar does not show up on screen if property set to `NO`. +- (void)testToVerifyShouldDisplaySearchControllerFunctionality +{ + self.viewController = [ATLSampleConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + [self.viewController setShouldDisplaySearchController:NO]; + [self setRootViewController:self.viewController]; + [tester waitForAbsenceOfViewWithAccessibilityLabel:@"Search Bar"]; +} + //Verify that attempting to provide a cell class that does not conform to ATLConversationPresenting results in a runtime exception. - (void)testToVerifyCustomCellClassNotConformingToProtocolRaisesException { @@ -296,6 +314,7 @@ - (void)testToVerifyConversationListViewControllerDataSource }] conversationListViewController:[OCMArg any] avatarItemForConversation:[OCMArg any]]; conversation = (LYRConversation *)[self newConversationWithMockUser:mockUser1 lastMessageText:@"Test Message"]; + [delegateMock verify]; } #pragma mark - ATLConversationListViewControllerDelegate @@ -326,7 +345,6 @@ - (void)testToVerifyDelegateIsNotifiedOfConversationSelection [delegateMock verify]; } - - (void)testToVerifyDelegateIsNotifiedOfGlobalConversationDeletion { self.viewController = [ATLSampleConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; @@ -393,6 +411,136 @@ - (void)testToVerifyDelegateIsNotifiedOfLocalConversationDeletion [delegateMock verify]; } +- (void)testToVerifyDelegateIsNotifiedOfSearch +{ + self.viewController = [ATLSampleConversationListViewController 
conversationListViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + self.viewController.allowsEditing = YES; + [self setRootViewController:self.viewController]; + [tester waitForTimeInterval:0.5]; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationListViewControllerDelegate)); + self.viewController.delegate = delegateMock; + + ATLUserMock *mockUser1 = [ATLUserMock userWithMockUserName:ATLMockUserNameKlemen]; + LYRConversationMock *conversation1 = [self newConversationWithMockUser:mockUser1 lastMessageText:@"Test Message"]; + + __block NSString *searchText = @"T"; + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationListViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + NSString *searchText; + [invocation getArgument:&searchText atIndex:3]; + expect(searchText).to.equal(searchText); + }] conversationListViewController:[OCMArg any] didSearchForText:searchText completion:[OCMArg any]]; + + [tester swipeViewWithAccessibilityLabel:[self.testInterface conversationLabelForConversation:conversation1] inDirection:KIFSwipeDirectionDown]; + [tester tapViewWithAccessibilityLabel:@"Search Bar"]; + [tester enterText:searchText intoViewWithAccessibilityLabel:@"Search Bar"]; + [delegateMock verify]; +} + +- (void)testToVerifyCustomDeletionColorAndText +{ + self.viewController = [ATLSampleConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + self.viewController.allowsEditing = YES; + [self setRootViewController:self.viewController]; + + ATLUserMock *mockUser1 = [ATLUserMock userWithMockUserName:ATLMockUserNameKlemen]; + LYRConversationMock *conversation1 = [self newConversationWithMockUser:mockUser1 lastMessageText:@"Test Message"]; + [tester waitForAnimationsToFinish]; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationListViewControllerDataSource)); + 
self.viewController.dataSource = delegateMock; + + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationListViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + NSString *deletionTitle = @"Test"; + [invocation setReturnValue:&deletionTitle]; + }] conversationListViewController:[OCMArg any] textForButtonWithDeletionMode:LYRDeletionModeAllParticipants]; + + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationListViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + UIColor *green = [UIColor greenColor]; + [invocation setReturnValue:&green]; + }] conversationListViewController:[OCMArg any] colorForButtonWithDeletionMode:LYRDeletionModeAllParticipants]; + + [tester swipeViewWithAccessibilityLabel:[self.testInterface conversationLabelForConversation:conversation1] inDirection:KIFSwipeDirectionLeft]; + [delegateMock verify]; + + UIView *deleteButton = [tester waitForViewWithAccessibilityLabel:@"Test"]; + expect(deleteButton.backgroundColor).to.equal([UIColor greenColor]); +} + +- (void)testToVerifyDefaultQueryConfigurationDataSourceMethod +{ + self.viewController = [ATLConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + self.viewController.allowsEditing = YES; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationListViewControllerDataSource)); + self.viewController.dataSource = delegateMock; + + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationListViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + LYRQuery *query; + [invocation getArgument:&query atIndex:3]; + expect(query).toNot.beNil(); + + [invocation setReturnValue:&query]; + }] conversationListViewController:[OCMArg 
any] willLoadWithQuery:[OCMArg any]]; + + [self setRootViewController:self.viewController]; + [delegateMock verifyWithDelay:1]; +} + +- (void)testToVerifyQueryConfigurationTakesEffect +{ + self.viewController = [ATLConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + self.viewController.allowsEditing = YES; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationListViewControllerDataSource)); + self.viewController.dataSource = delegateMock; + + __block NSSortDescriptor *sortDescriptor = [NSSortDescriptor sortDescriptorWithKey:@"identifier" ascending:YES]; + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationListViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + LYRQuery *query; + [invocation getArgument:&query atIndex:3]; + expect(query).toNot.beNil(); + + query.sortDescriptors = @[sortDescriptor]; + [invocation setReturnValue:&query]; + }] conversationListViewController:[OCMArg any] willLoadWithQuery:[OCMArg any]]; + + [self setRootViewController:self.viewController]; + [delegateMock verifyWithDelay:2]; + + expect(self.viewController.queryController.query.sortDescriptors).will.contain(sortDescriptor); +} + +- (void)testToVerifyAvatarImageURLLoad +{ + self.viewController = [ATLSampleConversationListViewController conversationListViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + self.viewController.displaysAvatarItem = YES; + [self setRootViewController:self.viewController]; + + ATLAvatarImageView *imageView = (ATLAvatarImageView *)[tester waitForViewWithAccessibilityLabel:ATLAvatarImageViewAccessibilityLabel]; + expect(imageView.image).will.beTruthy; +} + - (LYRConversationMock *)newConversationWithMockUser:(ATLUserMock *)mockUser lastMessageText:(NSString *)lastMessageText { LYRConversationMock *conversation = [self.testInterface 
conversationWithParticipants:[NSSet setWithObject:mockUser.participantIdentifier] lastMessageText:lastMessageText]; diff --git a/Tests/ATLConversationTableViewCellTest.m b/Tests/ATLConversationTableViewCellTest.m index e4d8aa1de..8f8847bb1 100644 --- a/Tests/ATLConversationTableViewCellTest.m +++ b/Tests/ATLConversationTableViewCellTest.m @@ -24,6 +24,10 @@ #import "ATLTestInterface.h" #import "ATLSampleConversationListViewController.h" + NSString *const ATLConversationTableViewAccessibilityLabel; + NSString *const ATLImageMIMETypePlaceholderText; + NSString *const ATLLocationMIMETypePlaceholderText; + @interface ATLConversationTableViewCellTest : XCTestCase @property (nonatomic) ATLTestInterface *testInterface; @@ -34,9 +38,6 @@ @interface ATLConversationTableViewCellTest : XCTestCase @implementation ATLConversationTableViewCellTest NSString *ATLLastMessageText = @"ATLLastMessageText"; -extern NSString *const ATLConversationTableViewAccessibilityIdentifier; -extern NSString *const ATLImageMIMETypePlaceholderText; -extern NSString *const ATLLocationMIMETypePlaceholderText; - (void)setUp { diff --git a/Tests/ATLConversationViewControllerTest.m b/Tests/ATLConversationViewControllerTest.m index 88c4860c0..7233cda3f 100644 --- a/Tests/ATLConversationViewControllerTest.m +++ b/Tests/ATLConversationViewControllerTest.m @@ -22,6 +22,7 @@ #import "ATLTestInterface.h" #import "ATLSampleConversationViewController.h" #import "ATLUserMock.h" +#import "ATLTestUtilities.h" extern NSString *const ATLAvatarImageViewAccessibilityLabel; @@ -58,6 +59,7 @@ - (void)setUp - (void)tearDown { + [tester waitForAnimationsToFinish]; [self.testInterface dismissPresentedViewController]; self.viewController.conversationDataSource = nil; self.viewController = nil; @@ -96,25 +98,133 @@ - (void)testToVerifySentImageAppearsInConversationView [self sendPhotoMessage]; } -- (void)testToVerifyCorrectCellIsReturnedForMessage +- (void)testToVerifyCachingTextMediaAttachment { - LYRMessagePartMock 
*messagePart1 = [LYRMessagePartMock messagePartWithText:@"How are you1?"]; - LYRMessageMock *message1 = [self.testInterface.layerClient newMessageWithParts:@[messagePart1] options:nil error:nil]; - [self.conversation sendMessage:message1 error:nil]; + [self setupConversationViewController]; + UIViewController *baseViewController = [UIViewController new]; + [self setRootViewController:baseViewController]; + [baseViewController.navigationController pushViewController:self.viewController animated:YES]; + [tester waitForAnimationsToFinish]; + + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + [toolBar.textInputView setText:@"Test"]; + self.viewController = nil; + [baseViewController.navigationController dismissViewControllerAnimated:YES completion:nil]; + [tester waitForAnimationsToFinish]; + + [self setupConversationViewController]; + [baseViewController.navigationController pushViewController:self.viewController animated:YES]; + toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + expect(toolBar.textInputView.text).to.equal(@"Test"); +} + +- (void)testToVerifyCachingImageMediaAttachment +{ + [self setupConversationViewController]; + UIViewController *baseViewController = [UIViewController new]; + [self setRootViewController:baseViewController]; + [baseViewController.navigationController pushViewController:self.viewController animated:YES]; + [tester waitForAnimationsToFinish]; + + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); + ATLMediaAttachment *attachement = [ATLMediaAttachment mediaAttachmentWithImage:image metadata:nil thumbnailSize:100]; + [toolBar insertMediaAttachment:attachement withEndLineBreak:NO]; + self.viewController = nil; + 
[baseViewController.navigationController dismissViewControllerAnimated:YES completion:nil]; + [tester waitForAnimationsToFinish]; - LYRMessagePartMock *messagePart2 = [LYRMessagePartMock messagePartWithText:@"How are you2?"]; - LYRMessageMock *message2 = [self.testInterface.layerClient newMessageWithParts:@[messagePart2] options:nil error:nil]; - [self.conversation sendMessage:message2 error:nil]; + [self setupConversationViewController]; + [baseViewController.navigationController pushViewController:self.viewController animated:YES]; + toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + expect(toolBar.mediaAttachments.count).to.equal(1); + ATLMediaAttachment *imageAttachment = toolBar.mediaAttachments[0]; + expect(imageAttachment.mediaMIMEType).to.equal(ATLMIMETypeImageJPEG); +} - LYRMessagePartMock *messagePart3 = [LYRMessagePartMock messagePartWithText:@"How are you3?"]; - LYRMessageMock *message3 = [self.testInterface.layerClient newMessageWithParts:@[messagePart3] options:nil error:nil]; - [self.conversation sendMessage:message3 error:nil]; +- (void)testToVerifyCachingTextAndImageMediaAttachments +{ + [self setupConversationViewController]; + UIViewController *baseViewController = [UIViewController new]; + [self setRootViewController:baseViewController]; + [baseViewController.navigationController pushViewController:self.viewController animated:YES]; + [tester waitForAnimationsToFinish]; + + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + [toolBar.textInputView setText:@"Test"]; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); + ATLMediaAttachment *attachement = [ATLMediaAttachment mediaAttachmentWithImage:image metadata:nil thumbnailSize:100]; + [toolBar insertMediaAttachment:attachement withEndLineBreak:NO]; + self.viewController = nil; + [baseViewController.navigationController 
dismissViewControllerAnimated:YES completion:nil]; + [tester waitForAnimationsToFinish]; + + [self setupConversationViewController]; + [baseViewController.navigationController pushViewController:self.viewController animated:YES]; + toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + expect(toolBar.mediaAttachments.count).to.equal(2); + ATLMediaAttachment *textAttachment = toolBar.mediaAttachments[0]; + expect(textAttachment.mediaMIMEType).to.equal(ATLMIMETypeTextPlain); + expect(textAttachment.textRepresentation).to.equal(@"Test"); + ATLMediaAttachment *imageAttachment = toolBar.mediaAttachments[1]; + expect(imageAttachment.mediaMIMEType).to.equal(ATLMIMETypeImageJPEG); +} +- (void)testToVerifyCachingSeveralMediaAttachments +{ [self setupConversationViewController]; - [self setRootViewController:self.viewController]; - id cell = [self.viewController collectionViewCellForMessage:(LYRMessage *)message3]; - expect([cell class]).to.beSubclassOf([ATLMessageCollectionViewCell class]); - expect([cell accessibilityLabel]).to.equal(@"Message: How are you3?"); + UIViewController *baseViewController = [UIViewController new]; + [self setRootViewController:baseViewController]; + [baseViewController.navigationController pushViewController:self.viewController animated:YES]; + [tester waitForAnimationsToFinish]; + + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + ATLMediaAttachment *textAttachment1 = [ATLMediaAttachment mediaAttachmentWithText:@"test1"]; + [toolBar insertMediaAttachment:textAttachment1 withEndLineBreak:YES]; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); + ATLMediaAttachment *attachement = [ATLMediaAttachment mediaAttachmentWithImage:image metadata:nil thumbnailSize:100]; + [toolBar insertMediaAttachment:attachement withEndLineBreak:YES]; + ATLMediaAttachment *textAttachment2 = [ATLMediaAttachment 
mediaAttachmentWithText:@"test2"]; + [toolBar insertMediaAttachment:textAttachment2 withEndLineBreak:NO]; + + self.viewController = nil; + [baseViewController.navigationController dismissViewControllerAnimated:YES completion:nil]; + [tester waitForAnimationsToFinish]; + + [self setupConversationViewController]; + [baseViewController.navigationController pushViewController:self.viewController animated:YES]; + toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + expect(toolBar.mediaAttachments.count).to.equal(3); + ATLMediaAttachment *testTextAttachment1 = toolBar.mediaAttachments[0]; + expect(testTextAttachment1.mediaMIMEType).to.equal(ATLMIMETypeTextPlain); + expect(testTextAttachment1.textRepresentation).to.equal(@"test1"); + ATLMediaAttachment *imageAttachment = toolBar.mediaAttachments[1]; + expect(imageAttachment.mediaMIMEType).to.equal(ATLMIMETypeImageJPEG); + ATLMediaAttachment *testTextAttachment2 = toolBar.mediaAttachments[2]; + expect(testTextAttachment2.mediaMIMEType).to.equal(ATLMIMETypeTextPlain); + expect(testTextAttachment2.textRepresentation).to.equal(@"test2"); +} + +- (void)testToVerifyInputToolbarIsAppropriateWidth +{ + self.viewController = [ATLSampleConversationViewController conversationViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + + UIViewController *parentViewController = [UIViewController new]; + [parentViewController addChildViewController:self.viewController]; + [parentViewController.view addSubview:self.viewController.view]; + [self.viewController didMoveToParentViewController:parentViewController]; + self.viewController.view.frame = parentViewController.view.frame; + + [self setRootViewController:parentViewController]; + [tester waitForAnimationsToFinish]; + + CGRect frame = self.viewController.view.frame; + frame.size.width = frame.size.width/2; + self.viewController.view.frame = frame; + + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar 
*)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + [toolBar layoutIfNeeded]; + expect(toolBar.frame.size.width).will.equal(frame.size.width); } #pragma mark - ATLConversationViewControllerDelegate @@ -226,6 +336,35 @@ - (void)testToVerifyCustomMessageObjects [tester waitForViewWithAccessibilityLabel:testMessageText]; } +//- (void)conversationViewController:(ATLConversationViewController *)conversationViewController configureCell:(UICollectionViewCell *)cell forMessage:(LYRMessage *)message; +- (void)testToVerifyDelegateIsNotifiedOfCellCreation +{ + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationViewControllerDelegate)); + self.viewController.delegate = delegateMock; + + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + UICollectionViewCell *cell; + [invocation getArgument:&cell atIndex:3]; + expect(cell).to.beKindOf([UICollectionViewCell class]); + + LYRMessage *message; + [invocation getArgument:&message atIndex:4]; + expect(message).to.beKindOf([LYRMessageMock class]); + + }] conversationViewController:[OCMArg any] configureCell:[OCMArg any] forMessage:[OCMArg any]]; + + [self sendMessageWithText:@"This is a test"]; + [tester tapViewWithAccessibilityLabel:@"Message: This is a test"]; + [delegateMock verify]; +} + - (void)testToVerityControllerDisplaysCorrectDataFromTheDataSource { [self setupConversationViewController]; @@ -294,6 +433,176 @@ - (void)testToVerifyAvatarImageIsDisplayedInGroupConversation [tester waitForViewWithAccessibilityLabel:ATLAvatarImageViewAccessibilityLabel]; } +- (void)testToVerifySenderNameIsDisplayedInGroupConversation +{ + ATLUserMock *mockUser2 = [ATLUserMock userWithMockUserName:ATLMockUserNameKevin]; + LYRClientMock *layerClient = [LYRClientMock 
layerClientMockWithAuthenticatedUserID:mockUser2.participantIdentifier]; + [self.conversation addParticipants:[NSSet setWithObject:mockUser2.participantIdentifier] error:nil]; + + LYRMessagePartMock *part = [LYRMessagePartMock messagePartWithText:@"Test"]; + LYRMessageMock *message = [layerClient newMessageWithParts:@[part] options:nil error:nil]; + [self.conversation sendMessage:message error:nil]; + + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + UILabel *label = (UILabel *)[tester waitForViewWithAccessibilityLabel:ATLConversationViewHeaderIdentifier]; + expect(label.text).to.equal(mockUser2.fullName); +} + +- (void)testToVerifyPlatformMessageSenderNameIsDisplayedInGroupConversation +{ + ATLUserMock *mockUser2 = [ATLUserMock userWithMockUserName:ATLMockUserNameKevin]; + LYRClientMock *layerClient = [LYRClientMock layerClientMockWithAuthenticatedUserID:mockUser2.participantIdentifier]; + [self.conversation addParticipants:[NSSet setWithObject:mockUser2.participantIdentifier] error:nil]; + + LYRMessagePartMock *part = [LYRMessagePartMock messagePartWithText:@"Test"]; + LYRMessageMock *message = [layerClient newPlatformMessageWithParts:@[part] senderName:@"Platform" options:nil error:nil]; + [self.conversation sendMessage:message error:nil]; + + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + UILabel *label = (UILabel *)[tester waitForViewWithAccessibilityLabel:ATLConversationViewHeaderIdentifier]; + expect(label.text).to.equal(@"Platform"); +} + +- (void)testToVerifyUserAvatarImageIsDisplayed +{ + LYRMessagePartMock *part = [LYRMessagePartMock messagePartWithText:@"Test"]; + LYRMessageMock *message = [self.testInterface.layerClient newMessageWithParts:@[part] options:nil error:nil]; + [self.conversation sendMessage:message error:nil]; + + [self setupConversationViewController]; + + self.viewController.shouldDisplayAvatarItemForOneOtherParticipant = YES; + 
self.viewController.shouldDisplayAvatarItemForAuthenticatedUser = YES; + + [self setRootViewController:self.viewController]; + + [tester waitForViewWithAccessibilityLabel:ATLAvatarImageViewAccessibilityLabel]; +} + +- (void)testToVerifyAvatarImageIsDisplayedOncePerSection +{ + NSTimeInterval oneMinuteTwoSecondsAgoInterval = -62; + NSTimeInterval oneSecondAgoInterval = -1; + + NSDate *now = [NSDate date]; + NSDate *oneSecondAgo = [now dateByAddingTimeInterval:oneSecondAgoInterval]; + NSDate *oneMinuteTwoSecondsAgo = [now dateByAddingTimeInterval:oneMinuteTwoSecondsAgoInterval]; + + LYRMessagePartMock *partOne = [LYRMessagePartMock messagePartWithText:@"One"]; + LYRMessageMock *messageOne = [self.testInterface.layerClient newMessageWithParts:@[partOne] options:nil error:nil]; + [self.conversation sendMessage:messageOne error:nil]; + messageOne.receivedAt = oneMinuteTwoSecondsAgo; + + LYRMessagePartMock *partTwo = [LYRMessagePartMock messagePartWithText:@"Two"]; + LYRMessageMock *messageTwo = [self.testInterface.layerClient newMessageWithParts:@[partTwo] options:nil error:nil]; + [self.conversation sendMessage:messageTwo error:nil]; + messageTwo.receivedAt = oneSecondAgo; + + LYRMessagePartMock *partThree = [LYRMessagePartMock messagePartWithText:@"Three"]; + LYRMessageMock *messageThree = [self.testInterface.layerClient newMessageWithParts:@[partThree] options:nil error:nil]; + [self.conversation sendMessage:messageThree error:nil]; + messageThree.receivedAt = now; + + [self setupConversationViewController]; + + self.viewController.shouldDisplayAvatarItemForOneOtherParticipant = YES; + self.viewController.shouldDisplayAvatarItemForAuthenticatedUser = YES; + self.viewController.avatarItemDisplayFrequency = ATLAvatarItemDisplayFrequencySection; + + [self setRootViewController:self.viewController]; + + ATLMessageCollectionViewCell *cellOne = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: One"]; + 
expect(cellOne.avatarImageView.hidden).to.equal(YES); + ATLMessageCollectionViewCell *cellTwo = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: Two"]; + expect(cellTwo.avatarImageView.hidden).to.equal(YES); + ATLMessageCollectionViewCell *cellThree = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: Three"]; + expect(cellThree.avatarImageView.hidden).to.equal(NO); +} + +- (void)testToVerifyAvatarImageIsDisplayedOncePerCluster +{ + NSTimeInterval oneMinuteTwoSecondsAgoInterval = -62; + NSTimeInterval oneSecondAgoInterval = -1; + + NSDate *now = [NSDate date]; + NSDate *oneSecondAgo = [now dateByAddingTimeInterval:oneSecondAgoInterval]; + NSDate *oneMinuteTwoSecondsAgo = [now dateByAddingTimeInterval:oneMinuteTwoSecondsAgoInterval]; + + LYRMessagePartMock *partOne = [LYRMessagePartMock messagePartWithText:@"One"]; + LYRMessageMock *messageOne = [self.testInterface.layerClient newMessageWithParts:@[partOne] options:nil error:nil]; + [self.conversation sendMessage:messageOne error:nil]; + messageOne.receivedAt = oneMinuteTwoSecondsAgo; + + LYRMessagePartMock *partTwo = [LYRMessagePartMock messagePartWithText:@"Two"]; + LYRMessageMock *messageTwo = [self.testInterface.layerClient newMessageWithParts:@[partTwo] options:nil error:nil]; + [self.conversation sendMessage:messageTwo error:nil]; + messageTwo.receivedAt = oneSecondAgo; + + LYRMessagePartMock *partThree = [LYRMessagePartMock messagePartWithText:@"Three"]; + LYRMessageMock *messageThree = [self.testInterface.layerClient newMessageWithParts:@[partThree] options:nil error:nil]; + [self.conversation sendMessage:messageThree error:nil]; + messageThree.receivedAt = now; + + [self setupConversationViewController]; + + self.viewController.shouldDisplayAvatarItemForOneOtherParticipant = YES; + self.viewController.shouldDisplayAvatarItemForAuthenticatedUser = YES; + self.viewController.avatarItemDisplayFrequency = ATLAvatarItemDisplayFrequencyCluster; 
+ + [self setRootViewController:self.viewController]; + + ATLMessageCollectionViewCell *cellOne = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: One"]; + expect(cellOne.avatarImageView.hidden).to.equal(NO); + ATLMessageCollectionViewCell *cellTwo = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: Two"]; + expect(cellTwo.avatarImageView.hidden).to.equal(YES); + ATLMessageCollectionViewCell *cellThree = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: Three"]; + expect(cellThree.avatarImageView.hidden).to.equal(NO); +} + +- (void)testToVerifyAvatarImageIsDisplayedForEveryMessage +{ + NSTimeInterval oneMinuteTwoSecondsAgoInterval = -62; + NSTimeInterval oneSecondAgoInterval = -1; + + NSDate *now = [NSDate date]; + NSDate *oneSecondAgo = [now dateByAddingTimeInterval:oneSecondAgoInterval]; + NSDate *oneMinuteTwoSecondsAgo = [now dateByAddingTimeInterval:oneMinuteTwoSecondsAgoInterval]; + + LYRMessagePartMock *partOne = [LYRMessagePartMock messagePartWithText:@"One"]; + LYRMessageMock *messageOne = [self.testInterface.layerClient newMessageWithParts:@[partOne] options:nil error:nil]; + [self.conversation sendMessage:messageOne error:nil]; + messageOne.receivedAt = oneMinuteTwoSecondsAgo; + + LYRMessagePartMock *partTwo = [LYRMessagePartMock messagePartWithText:@"Two"]; + LYRMessageMock *messageTwo = [self.testInterface.layerClient newMessageWithParts:@[partTwo] options:nil error:nil]; + [self.conversation sendMessage:messageTwo error:nil]; + messageTwo.receivedAt = oneSecondAgo; + + LYRMessagePartMock *partThree = [LYRMessagePartMock messagePartWithText:@"Three"]; + LYRMessageMock *messageThree = [self.testInterface.layerClient newMessageWithParts:@[partThree] options:nil error:nil]; + [self.conversation sendMessage:messageThree error:nil]; + messageThree.receivedAt = now; + + [self setupConversationViewController]; + + 
self.viewController.shouldDisplayAvatarItemForOneOtherParticipant = YES; + self.viewController.shouldDisplayAvatarItemForAuthenticatedUser = YES; + self.viewController.avatarItemDisplayFrequency = ATLAvatarItemDisplayFrequencyAll; + + [self setRootViewController:self.viewController]; + + ATLMessageCollectionViewCell *cellOne = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: One"]; + expect(cellOne.avatarImageView.hidden).to.equal(NO); + ATLMessageCollectionViewCell *cellTwo = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: Two"]; + expect(cellTwo.avatarImageView.hidden).to.equal(NO); + ATLMessageCollectionViewCell *cellThree = (ATLMessageCollectionViewCell *)[tester waitForViewWithAccessibilityLabel:@"Message: Three"]; + expect(cellThree.avatarImageView.hidden).to.equal(NO); +} + - (void)testToVerifyCustomAvatarImageDiameter { [[ATLAvatarImageView appearance] setAvatarImageViewDiameter:40]; @@ -313,6 +622,157 @@ - (void)testToVerifyCustomAvatarImageDiameter expect(imageView.avatarImageViewDiameter).to.equal(40); } +- (void)testToVerifyReloadingCellsDuringQueryControllerAnimationDoesNotRaise +{ + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationViewControllerDelegate)); + self.viewController.delegate = delegateMock; + + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + LYRMessage *message; + [invocation getArgument:&message atIndex:3]; + expect(message).to.beKindOf([LYRMessageMock class]); + + expect(^{[self.viewController reloadCellForMessage:message];}).toNot.raise(NSInternalInconsistencyException); + }] conversationViewController:[OCMArg any] didSendMessage:[OCMArg any]]; + + [tester enterText:@"test" 
intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; + [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; + [delegateMock verify]; +} + +- (void)testToVerifyReloadingCellsForMultipleMessagesDoesNotRaise +{ + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + [tester enterText:@"test" intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; + [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; + + [tester enterText:@"test" intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; + [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; + + [tester enterText:@"test" intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; + [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationViewControllerDelegate)); + self.viewController.delegate = delegateMock; + + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + LYRMessage *message; + [invocation getArgument:&message atIndex:3]; + expect(message).to.beKindOf([LYRMessageMock class]); + + expect(^{[self.viewController reloadCellsForMessagesSentByParticipantWithIdentifier:self.viewController.layerClient.authenticatedUserID];}).toNot.raise(NSInternalInconsistencyException); + }] conversationViewController:[OCMArg any] didSendMessage:[OCMArg any]]; + + [tester enterText:@"test" intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; + [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; + [delegateMock verify]; +} + +- (void)testToVerifyDefaultQueryConfigurationDataSourceMethod +{ + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + id delegateMock = 
OCMProtocolMock(@protocol(ATLConversationViewControllerDataSource)); + self.viewController.dataSource = delegateMock; + + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + LYRQuery *query; + [invocation getArgument:&query atIndex:3]; + expect(query).toNot.beNil(); + + [invocation setReturnValue:&query]; + }] conversationViewController:[OCMArg any] willLoadWithQuery:[OCMArg any]]; + + self.viewController.conversation = [self.viewController.layerClient newConversationWithParticipants:[NSSet setWithObject:@"test"] options:nil error:nil]; + [delegateMock verifyWithDelay:1]; +} + +- (void)testToVerifyQueryConfigurationTakesEffect +{ + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationViewControllerDataSource)); + self.viewController.dataSource = delegateMock; + + __block NSSortDescriptor *sortDescriptor = [NSSortDescriptor sortDescriptorWithKey:@"identifier" ascending:YES]; + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + LYRQuery *query; + [invocation getArgument:&query atIndex:3]; + expect(query).toNot.beNil(); + + query.sortDescriptors = @[sortDescriptor]; + [invocation setReturnValue:&query]; + }] conversationViewController:[OCMArg any] willLoadWithQuery:[OCMArg any]]; + + self.viewController.conversation = [self.viewController.layerClient newConversationWithParticipants:[NSSet setWithObject:@"test"] options:nil error:nil]; + [delegateMock verifyWithDelay:1]; + + expect(self.viewController.conversationDataSource.queryController.query.sortDescriptors).to.contain(sortDescriptor); +} + +- 
(void)testToVerifyControllerAssertsIfNoQueryIsReturned +{ + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + id delegateMock = OCMProtocolMock(@protocol(ATLConversationViewControllerDataSource)); + self.viewController.dataSource = delegateMock; + + expect(^{ + [[[delegateMock expect] andDo:^(NSInvocation *invocation) { + ATLConversationViewController *controller; + [invocation getArgument:&controller atIndex:2]; + expect(controller).to.equal(self.viewController); + + LYRQuery *query; + [invocation getArgument:&query atIndex:3]; + expect(query).toNot.beNil(); + + }] conversationViewController:[OCMArg any] willLoadWithQuery:[OCMArg any]]; + + self.viewController.conversation = [self.viewController.layerClient newConversationWithParticipants:[NSSet setWithObject:@"test"] options:nil error:nil]; + [delegateMock verifyWithDelay:1]; + }).to.raise(NSInvalidArgumentException); +} + +- (void)testToVerifySendingWhitespaceDoesNotSendLocation +{ + [self setupConversationViewController]; + [self setRootViewController:self.viewController]; + + id viewControllerMock = OCMPartialMock(self.viewController); + + [[[viewControllerMock stub] andDo:^(NSInvocation *invocation) { + failure(@"Shouldn't call send location message"); + }] sendLocationMessage]; + + [tester enterText:@" " intoViewWithAccessibilityLabel:ATLMessageInputToolbarAccessibilityLabel]; + [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; +} + - (void)setupConversationViewController { self.viewController = [ATLSampleConversationViewController conversationViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; @@ -332,20 +792,20 @@ - (void)sendMessageWithText:(NSString *)messageText - (void)sendPhotoMessage { - UIImage *image = [UIImage imageNamed:@"test-logo"]; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); ATLMediaAttachment *attachement = [ATLMediaAttachment mediaAttachmentWithImage:image 
metadata:nil thumbnailSize:100]; NSError *error; LYRMessageMock *message = [self.testInterface.layerClient newMessageWithParts:ATLMessagePartsWithMediaAttachment(attachement) options:nil error:&error]; expect(error).to.beNil; [self.conversation sendMessage:message error:&error]; expect(error).to.beNil; - [tester waitForViewWithAccessibilityLabel:[NSString stringWithFormat:@"Message: Photo"]]; + [tester waitForViewWithAccessibilityLabel:[NSString stringWithFormat:@"Message: Image"]]; } - (void)setRootViewController:(UIViewController *)controller { [self.testInterface presentViewController:controller]; - [tester waitForTimeInterval:1.0]; // Allow controller to be presented. + [tester waitForAnimationsToFinish]; } @end diff --git a/Tests/ATLMediaAttachmentTests.m b/Tests/ATLMediaAttachmentTests.m new file mode 100644 index 000000000..a3cdc419b --- /dev/null +++ b/Tests/ATLMediaAttachmentTests.m @@ -0,0 +1,416 @@ +// +// ATLMediaAttachmentTests.m +// Atlas +// +// Created by Klemen Verdnik on 2/26/15. +// Copyright (c) 2015 Layer, Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +#import +#import +#import +#import "ATLMediaAttachment.h" +#import "ATLTestUtilities.h" +#import "ATLMediaInputStream.h" + +#define EXP_SHORTHAND +#import +#import + +@interface ATLMediaAttachmentTests : XCTestCase + +@end + +@implementation ATLMediaAttachmentTests + +- (void)setUp +{ + [super setUp]; +} + +- (void)tearDown +{ + [super tearDown]; +} + +- (void)testMediaAttachmentInitFailures +{ + expect(^{ + __unused ATLMediaAttachment *mediaAttachment = [[ATLMediaAttachment alloc] init]; + }).to.raiseWithReason(NSInternalInconsistencyException, @"Failed to call designated initializer. Use one of the following initialiers: mediaAttachmentWithAssetURL:thumbnailSize:, mediaAttachmentWithImage:metadata:thumbnailSize:, mediaAttachmentWithText:, mediaAttachmentWithLocation:"); + + expect(^{ + __unused ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithAssetURL:nil thumbnailSize:0]; + }).to.raiseWithReason(NSInternalInconsistencyException, @"Cannot initialize ATLMediaAttachment with `nil` assetURL."); + + expect(^{ + __unused ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithImage:nil metadata:nil thumbnailSize:0]; + }).to.raiseWithReason(NSInternalInconsistencyException, @"Cannot initialize ATLMediaAttachment with `nil` image."); + + expect(^{ + __unused ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithText:nil]; + }).to.raiseWithReason(NSInternalInconsistencyException, @"Cannot initialize ATLMediaAttachment with `nil` text."); + + expect(^{ + __unused ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithLocation:nil]; + }).to.raiseWithReason(NSInternalInconsistencyException, @"Cannot initialize ATLMediaAttachment with `nil` location."); +} + +#pragma mark Tests for Media Attachment With Text + +- (void)testMediaAttachmentWithText +{ + ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithText:@"What about the Noodle Incident?"]; + + // 
Verifying properties + expect(mediaAttachment).toNot.beNil(); + expect(NSStringFromClass(mediaAttachment.class)).to.equal(@"ATLTextMediaAttachment"); + expect(mediaAttachment.textRepresentation).to.equal(@"What about the Noodle Incident?"); + expect(mediaAttachment.thumbnailSize).to.equal(0); + expect(mediaAttachment.mediaMIMEType).to.equal(@"text/plain"); + expect(mediaAttachment.mediaInputStream).toNot.beNil(); + expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(mediaAttachment.thumbnailMIMEType).to.beNil(); + expect(mediaAttachment.thumbnailInputStream).to.beNil(); + expect(mediaAttachment.metadataMIMEType).to.beNil(); + expect(mediaAttachment.metadataInputStream).to.beNil(); + + // Verifying stream content + NSData *payload = ATLTestAttachmentDataFromStream(mediaAttachment.mediaInputStream); + expect(payload).toNot.beNil(); + expect(payload).to.equal([@"What about the Noodle Incident?" dataUsingEncoding:NSUTF8StringEncoding]); +} + +#pragma mark Tests for Media Attachment With Location + +- (void)testMediaAttachmentWithLocation +{ + // Create a test location, which will be serialized. 
+ CLLocation *location = [[CLLocation alloc] initWithLatitude:46.368383 longitude:15.106631]; + ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithLocation:location]; + + // Verifying properties + expect(mediaAttachment).toNot.beNil(); + expect(NSStringFromClass(mediaAttachment.class)).to.equal(@"ATLLocationMediaAttachment"); + expect(mediaAttachment.textRepresentation).to.equal(@"Attachment: Location"); + expect(mediaAttachment.thumbnailSize).to.equal(0); + expect(mediaAttachment.mediaMIMEType).to.equal(@"location/coordinate"); + expect(mediaAttachment.mediaInputStream).toNot.beNil(); + expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(mediaAttachment.thumbnailMIMEType).to.beNil(); + expect(mediaAttachment.thumbnailInputStream).to.beNil(); + expect(mediaAttachment.metadataMIMEType).to.beNil(); + expect(mediaAttachment.metadataInputStream).to.beNil(); + + // Verifying stream content + NSData *payload = ATLTestAttachmentDataFromStream(mediaAttachment.mediaInputStream); + expect(payload).toNot.beNil(); + expect([NSJSONSerialization JSONObjectWithData:payload options:NSJSONReadingAllowFragments error:nil]).to.equal(@{ @"lat": @(location.coordinate.latitude), @"lon": @(location.coordinate.longitude) }); +} + +#pragma mark Tests for Media Attachment With Images + +/** + @warning Make sure you allowed the XCTestCase to access the photo library. + It's a manual process on the UI in the simulator. + */ +- (void)testMediaAttachmentWithImageFromAsset +{ + // Generate a test image and put it into the photo library. 
+ UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); + ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; + __block NSURL *assetURL; + [library writeImageToSavedPhotosAlbum:image.CGImage metadata:@{ @"Orientation": @(UIImageOrientationUp) } completionBlock:^(NSURL *outAssetURL, NSError *error) { + assetURL = outAssetURL; + }]; + expect(assetURL).willNot.beNil(); + + ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithAssetURL:assetURL thumbnailSize:512]; + + // Verifying properties + expect(mediaAttachment).toNot.beNil(); + expect(NSStringFromClass(mediaAttachment.class)).to.equal(@"ATLAssetMediaAttachment"); + expect(mediaAttachment.textRepresentation).to.equal(@"Attachment: Image"); + expect(mediaAttachment.thumbnailSize).to.equal(512); + expect(mediaAttachment.mediaMIMEType).to.equal(@"image/jpeg"); + expect(mediaAttachment.mediaInputStream).toNot.beNil(); + expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(mediaAttachment.thumbnailMIMEType).to.equal(@"image/jpeg+preview"); + expect(mediaAttachment.thumbnailInputStream).toNot.beNil(); + expect(mediaAttachment.metadataMIMEType).to.equal(@"application/json+imageSize"); + expect(mediaAttachment.metadataInputStream).toNot.beNil(); + + // Verifying stream content + NSData *mediaPayload = ATLTestAttachmentDataFromStream(mediaAttachment.mediaInputStream); + expect(mediaPayload).toNot.beNil(); + + [mediaAttachment.mediaInputStream close]; + expect(mediaAttachment.mediaInputStream.streamError).to.beNil(); + expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusClosed); + + UIImage *processedImage = [UIImage imageWithData:mediaPayload]; + expect(processedImage.size).to.equal(CGSizeMake(1024, 512)); + expect(processedImage.imageOrientation).to.equal(UIImageOrientationUp); + + NSURL *outputStreamedImageURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() 
stringByAppendingPathComponent:@"streamed-image.jpeg"]]; + [[NSFileManager defaultManager] removeItemAtURL:outputStreamedImageURL error:nil]; + [mediaPayload writeToURL:outputStreamedImageURL atomically:NO]; + NSLog(@"Output file at path:'%@' length=%lu", outputStreamedImageURL.path, mediaPayload.length); + + // Verifying thumbnail content + NSData *thumbnailPayload = ATLTestAttachmentDataFromStream(mediaAttachment.thumbnailInputStream); + expect(thumbnailPayload).toNot.beNil(); + processedImage = [UIImage imageWithData:thumbnailPayload]; + expect(processedImage.size).to.equal(CGSizeMake(512, 256)); + expect(processedImage.imageOrientation).to.equal(UIImageOrientationUp); + + // Verifying image metadata JSON + NSData *imageSizeMetadataJSON = ATLTestAttachmentDataFromStream(mediaAttachment.metadataInputStream); + expect(imageSizeMetadataJSON).toNot.beNil(); + expect([NSJSONSerialization JSONObjectWithData:imageSizeMetadataJSON options:NSJSONReadingAllowFragments error:nil]).to.equal(@{ @"width": @1024, @"height": @512, @"orientation": @(UIImageOrientationUp) }); +} + +- (void)testMediaAttachmentWithImageFromMemory +{ + // Generate a test image for an in memory image streaming. 
+ UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); + ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithImage:image metadata:@{ @"Orientation": @(UIImageOrientationUp) } thumbnailSize:512]; + + // Verifying properties + expect(mediaAttachment).toNot.beNil(); + expect(NSStringFromClass(mediaAttachment.class)).to.equal(@"ATLImageMediaAttachment"); + expect(mediaAttachment.textRepresentation).to.equal(@"Attachment: Image"); + expect(mediaAttachment.thumbnailSize).to.equal(512); + expect(mediaAttachment.mediaMIMEType).to.equal(@"image/jpeg"); + expect(mediaAttachment.mediaInputStream).toNot.beNil(); + expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(mediaAttachment.thumbnailMIMEType).to.equal(@"image/jpeg+preview"); + expect(mediaAttachment.thumbnailInputStream).toNot.beNil(); + expect(mediaAttachment.metadataMIMEType).to.equal(@"application/json+imageSize"); + expect(mediaAttachment.metadataInputStream).toNot.beNil(); + + // Verifying stream content + NSData *mediaPayload = ATLTestAttachmentDataFromStream(mediaAttachment.mediaInputStream); + expect(mediaPayload).toNot.beNil(); + + [mediaAttachment.mediaInputStream close]; + expect(mediaAttachment.mediaInputStream.streamError).to.beNil(); + expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusClosed); + + UIImage *processedImage = [UIImage imageWithData:mediaPayload]; + expect(processedImage.size).to.equal(CGSizeMake(1024, 512)); + expect(processedImage.imageOrientation).to.equal(UIImageOrientationUp); + + NSURL *outputStreamedImageURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"streamed-image.jpeg"]]; + [[NSFileManager defaultManager] removeItemAtURL:outputStreamedImageURL error:nil]; + [mediaPayload writeToURL:outputStreamedImageURL atomically:NO]; + NSLog(@"Output file at path:'%@' length=%lu", outputStreamedImageURL.path, mediaPayload.length); + + // 
Verifying thumbnail content + NSData *thumbnailPayload = ATLTestAttachmentDataFromStream(mediaAttachment.thumbnailInputStream); + expect(thumbnailPayload).toNot.beNil(); + processedImage = [UIImage imageWithData:thumbnailPayload]; + expect(processedImage.size).to.equal(CGSizeMake(512, 256)); + expect(processedImage.imageOrientation).to.equal(UIImageOrientationUp); + + // Verifying image metadata JSON + NSData *imageSizeMetadataJSON = ATLTestAttachmentDataFromStream(mediaAttachment.metadataInputStream); + expect(imageSizeMetadataJSON).toNot.beNil(); + expect([NSJSONSerialization JSONObjectWithData:imageSizeMetadataJSON options:NSJSONReadingAllowFragments error:nil]).to.equal(@{ @"width": @1024, @"height": @512, @"orientation": @(UIImageOrientationUp) }); +} + +- (void)testMediaAttachmentWithImageFromFile +{ + // Generate a test image at a temporary path. + NSURL *imageFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"test-image.jpeg"]]; + [[NSFileManager defaultManager] removeItemAtURL:imageFileURL error:nil]; + UIImage *generatedTestImage = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1920, 1080)); + NSData *imageData = UIImageJPEGRepresentation(generatedTestImage, 1.0); + [imageData writeToURL:imageFileURL atomically:NO]; + expect([[NSFileManager defaultManager] fileExistsAtPath:imageFileURL.path]).to.beTruthy(); + + //check properties of ATLMediaAttachment Object + ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithFileURL:imageFileURL thumbnailSize:512]; + expect(mediaAttachment).toNot.beNil(); + expect(NSStringFromClass(mediaAttachment.class)).to.equal(@"ATLAssetMediaAttachment"); + expect(mediaAttachment.textRepresentation).to.equal(@"Attachment: Image"); + expect(mediaAttachment.thumbnailSize).to.equal(512); + expect(mediaAttachment.mediaMIMEType).to.equal(@"image/jpeg"); + expect(mediaAttachment.mediaInputStream).toNot.beNil(); + 
expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(mediaAttachment.thumbnailMIMEType).to.equal(@"image/jpeg+preview"); + expect(mediaAttachment.thumbnailInputStream).toNot.beNil(); + expect(mediaAttachment.metadataMIMEType).to.equal(@"application/json+imageSize"); + expect(mediaAttachment.metadataInputStream).toNot.beNil(); + + // Verifying stream content + NSInputStream *stream = mediaAttachment.mediaInputStream; + [stream open]; + expect(stream.streamStatus).to.equal(NSStreamStatusOpen); + expect(stream.streamError).to.beNil(); + + NSData *mediaPayload = ATLTestAttachmentDataFromStream(mediaAttachment.mediaInputStream); + expect(mediaPayload).toNot.beNil(); + + [stream close]; + expect(stream.streamError).to.beNil(); + expect(stream.streamStatus).to.equal(NSStreamStatusClosed); + + UIImage *processedImage = [UIImage imageWithData:mediaPayload]; + expect(processedImage.size).to.equal(CGSizeMake(1920, 1080)); + expect(processedImage.imageOrientation).to.equal(UIImageOrientationUp); + + NSURL *outputStreamedImageURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"streamed-image.jpeg"]]; + [[NSFileManager defaultManager] removeItemAtURL:outputStreamedImageURL error:nil]; + [mediaPayload writeToURL:outputStreamedImageURL atomically:NO]; + NSLog(@"Output file at path:'%@' length=%lu", outputStreamedImageURL.path, mediaPayload.length); + + // Verifying thumbnail content + NSData *thumbnailPayload = ATLTestAttachmentDataFromStream(mediaAttachment.thumbnailInputStream); + expect(thumbnailPayload).toNot.beNil(); + processedImage = [UIImage imageWithData:thumbnailPayload]; + expect(processedImage.size).to.equal(CGSizeMake(512, 288)); + expect(processedImage.imageOrientation).to.equal(UIImageOrientationUp); + + // Verifying image metadata JSON + NSData *imageSizeMetadataJSON = ATLTestAttachmentDataFromStream(mediaAttachment.metadataInputStream); + expect(imageSizeMetadataJSON).toNot.beNil(); + 
expect([NSJSONSerialization JSONObjectWithData:imageSizeMetadataJSON options:NSJSONReadingAllowFragments error:nil]).to.equal(@{ @"width": @1920, @"height": @1080, @"orientation": @(UIImageOrientationUp) }); +} + +#pragma mark Tests for Media Attachment With Videos + +/** + @warning Make sure you allowed the XCTestCase to access the photo library. + It's a manual process on the UI in the simulator. + */ +- (void)testMediaAttachmentWithVideoFromAsset +{ + // Generate a test video and put in into the library. + NSURL *videoFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"temporary-test-video.mov"]]; + BOOL success = ATLTestMakeVideo(videoFileURL, CGSizeMake(1280, 720), 30, 2); + expect(success).to.beTruthy(); + ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; + __block NSURL *assetURL; + [library writeVideoAtPathToSavedPhotosAlbum:videoFileURL completionBlock:^(NSURL *outAssetURL, NSError *error) { + assetURL = outAssetURL; + }]; + expect(assetURL).willNot.beNil(); + + // Get Last Video + ALAsset *sourceAsset = ATLVideoAssetTestObtainLastVideoFromAssetLibrary(library); + expect(sourceAsset).toNot.beNil(); + NSURL *lastVideoURL = sourceAsset.defaultRepresentation.url; + + //check properties of ATLMediaAttachment Object + ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithAssetURL:lastVideoURL thumbnailSize:512]; + expect(mediaAttachment).toNot.beNil(); + expect(NSStringFromClass(mediaAttachment.class)).to.equal(@"ATLAssetMediaAttachment"); + expect(mediaAttachment.textRepresentation).to.equal(@"Attachment: Video"); + expect(mediaAttachment.thumbnailSize).to.equal(512); + expect(mediaAttachment.mediaMIMEType).to.equal(@"video/mp4"); + expect(mediaAttachment.mediaInputStream).toNot.beNil(); + expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(mediaAttachment.thumbnailMIMEType).to.equal(@"image/jpeg+preview"); + 
expect(mediaAttachment.thumbnailInputStream).toNot.beNil(); + expect(mediaAttachment.metadataMIMEType).to.equal(@"application/json+imageSize"); + expect(mediaAttachment.metadataInputStream).toNot.beNil(); + + // Verifying stream content + NSInputStream *stream = mediaAttachment.mediaInputStream; + [stream open]; + expect(stream.streamStatus).to.equal(NSStreamStatusOpen); + expect(stream.streamError).to.beNil(); + + NSData *mediaPayload = ATLTestAttachmentDataFromStream(mediaAttachment.mediaInputStream); + [stream close]; + expect(stream.streamError).to.beNil(); + expect(stream.streamStatus).to.equal(NSStreamStatusClosed); + expect(mediaPayload).toNot.beNil(); + expect(mediaPayload.length).to.beGreaterThan(0); + + NSURL *outputStreamedVideoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"streamed-video.mp4"]]; + [[NSFileManager defaultManager] removeItemAtURL:outputStreamedVideoURL error:nil]; + [mediaPayload writeToURL:outputStreamedVideoURL atomically:NO]; + NSLog(@"Output file at path:'%@' length=%lu", outputStreamedVideoURL.path, mediaPayload.length); + + // Verifying thumbnail content + NSData *thumbnailPayload = ATLTestAttachmentDataFromStream(mediaAttachment.thumbnailInputStream); + expect(thumbnailPayload).toNot.beNil(); + UIImage *processedImage = [UIImage imageWithData:thumbnailPayload]; + expect(processedImage.size).toNot.beNil(); + expect(processedImage.size).to.equal(CGSizeMake(512, 288)); + expect(processedImage.imageOrientation).to.equal(UIImageOrientationUp); + + // Verifying image metadata JSON + NSData *videoSizeMetadataJSON = ATLTestAttachmentDataFromStream(mediaAttachment.metadataInputStream); + expect(videoSizeMetadataJSON).toNot.beNil(); + expect([NSJSONSerialization JSONObjectWithData:videoSizeMetadataJSON options:NSJSONReadingAllowFragments error:nil]).to.equal(@{ @"width": @1280, @"height": @720, @"orientation": @(UIImageOrientationUp) }); +} + +- (void)testMediaAttachmentWithVideoFromFile +{ + // 
Generate a test video and put in into the library. + NSURL *videoFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"temporary-test-video.mov"]]; + BOOL success = ATLTestMakeVideo(videoFileURL, CGSizeMake(1280, 720), 30, 2); + expect(success).to.beTruthy(); + + // Check properties of ATLMediaAttachment Object + ATLMediaAttachment *mediaAttachment = [ATLMediaAttachment mediaAttachmentWithFileURL:videoFileURL thumbnailSize:512]; + expect(mediaAttachment).toNot.beNil(); + expect(NSStringFromClass(mediaAttachment.class)).to.equal(@"ATLAssetMediaAttachment"); + expect(mediaAttachment.textRepresentation).to.equal(@"Attachment: Video"); + expect(mediaAttachment.thumbnailSize).to.equal(512); + expect(mediaAttachment.mediaMIMEType).to.equal(@"video/mp4"); + expect(mediaAttachment.mediaInputStream).toNot.beNil(); + expect(mediaAttachment.mediaInputStream.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(mediaAttachment.thumbnailMIMEType).to.equal(@"image/jpeg+preview"); + expect(mediaAttachment.thumbnailInputStream).toNot.beNil(); + expect(mediaAttachment.metadataMIMEType).to.equal(@"application/json+imageSize"); + expect(mediaAttachment.metadataInputStream).toNot.beNil(); + + // Verifying stream content + NSInputStream *stream = mediaAttachment.mediaInputStream; + [stream open]; + expect(stream.streamStatus).to.equal(NSStreamStatusOpen); + expect(stream.streamError).to.beNil(); + + NSData *mediaPayload = ATLTestAttachmentDataFromStream(mediaAttachment.mediaInputStream); + [stream close]; + expect(stream.streamError).to.beNil(); + expect(stream.streamStatus).to.equal(NSStreamStatusClosed); + expect(mediaPayload).toNot.beNil(); + expect(mediaPayload.length).to.beGreaterThan(0); + + NSURL *outputStreamedVideoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"streamed-video.mp4"]]; + [[NSFileManager defaultManager] removeItemAtURL:outputStreamedVideoURL error:nil]; + [mediaPayload 
writeToURL:outputStreamedVideoURL atomically:NO]; + NSLog(@"Output file at path:'%@' length=%lu", outputStreamedVideoURL.path, mediaPayload.length); + + // Verifying thumbnail content + NSData *thumbnailPayload = ATLTestAttachmentDataFromStream(mediaAttachment.thumbnailInputStream); + expect(thumbnailPayload).toNot.beNil(); + UIImage *processedImage = [UIImage imageWithData:thumbnailPayload]; + expect(processedImage.size).toNot.beNil(); + expect(processedImage.size).to.equal(CGSizeMake(512, 288)); + expect(processedImage.imageOrientation).to.equal(UIImageOrientationUp); + + // Verifying image metadata JSON + NSData *videoSizeMetadataJSON = ATLTestAttachmentDataFromStream(mediaAttachment.metadataInputStream); + expect(videoSizeMetadataJSON).toNot.beNil(); + expect([NSJSONSerialization JSONObjectWithData:videoSizeMetadataJSON options:NSJSONReadingAllowFragments error:nil]).to.equal(@{ @"width": @1280, @"height": @720, @"orientation": @(UIImageOrientationRight) }); +} + +@end diff --git a/Tests/ATLMediaStreamTests.m b/Tests/ATLMediaStreamTests.m index 6697127ac..ec0f2521b 100644 --- a/Tests/ATLMediaStreamTests.m +++ b/Tests/ATLMediaStreamTests.m @@ -22,68 +22,31 @@ #import #import #import "ATLMediaInputStream.h" +#import "ATLTestUtilities.h" #define EXP_SHORTHAND #import #import -ALAsset *ATLAssetTestObtainLastImageFromAssetLibrary(ALAssetsLibrary *library) -{ - dispatch_semaphore_t semaphore = dispatch_semaphore_create(0); - dispatch_queue_t asyncQueue = dispatch_queue_create("com.layer.ATLMediaStreamTest.ObtainLastImage.async", DISPATCH_QUEUE_CONCURRENT); - - __block ALAsset *sourceAsset; - dispatch_async(asyncQueue, ^{ - [library enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos usingBlock:^(ALAssetsGroup *group, BOOL *stop) { - if (!group) { - *stop = YES; - dispatch_semaphore_signal(semaphore); - return; - } - [group setAssetsFilter:[ALAssetsFilter allPhotos]]; - if ([group numberOfAssets] == 0) { - *stop = YES; - dispatch_semaphore_signal(semaphore); - return; - 
} - [group enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *result, NSUInteger index, BOOL *innerStop) { - *innerStop = YES; - *stop = YES; - if (!result) { - return; - } - sourceAsset = result; - }]; - } failureBlock:^(NSError *error) { - dispatch_semaphore_signal(semaphore); - }]; - }); - dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER); - return sourceAsset; -} - -@interface ATLMediaStreamTest : XCTestCase +@interface ATLMediaInputStreamTest : XCTestCase @end -@implementation ATLMediaStreamTest +@implementation ATLMediaInputStreamTest -- (void)setUp { +- (void)setUp +{ [super setUp]; } -- (void)tearDown { +- (void)tearDown +{ [super tearDown]; } -- (void)testMediaStreamDesignatedInitFails -{ - expect(^{ - __unused ATLMediaInputStream *streamDirect = [[ATLMediaInputStream alloc] init]; - }).to.raise(NSInternalInconsistencyException); -} +#pragma mark - Photo Asset Input Stream -- (void)testMediaStreamOpensStream +- (void)testMediaStreamOpensStreamForPhotoAsset { ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; ALAsset *sourceAsset = ATLAssetTestObtainLastImageFromAssetLibrary(library); @@ -110,7 +73,7 @@ - (void)testMediaStreamOpensStream expect(streamResample.streamError).to.beNil(); } -- (void)testMediaStreamClosesStream +- (void)testMediaStreamClosesStreamForPhotoAsset { ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; ALAsset *sourceAsset = ATLAssetTestObtainLastImageFromAssetLibrary(library); @@ -141,7 +104,7 @@ - (void)testMediaStreamClosesStream expect(streamResample.streamError).to.beNil(); } -- (void)testMediaStreamReadsStream +- (void)testMediaStreamReadsPhotoAsset { ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; ALAsset *sourceAsset = ATLAssetTestObtainLastImageFromAssetLibrary(library); @@ -175,10 +138,11 @@ - (void)testMediaStreamReadsStream NSLog(@"check file: %@ length=%lu", path, data.length); } -- (void)testMediaStreamReadsStreamFromDifferentThread +- 
(void)testMediaStreamReadsStreamForPhotoAssetFromDifferentThread { ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; ALAsset *sourceAsset = ATLAssetTestObtainLastImageFromAssetLibrary(library); + expect(sourceAsset).toNot.beNil(); NSURL *lastImageURL = sourceAsset.defaultRepresentation.url; @@ -211,4 +175,423 @@ - (void)testMediaStreamReadsStreamFromDifferentThread NSLog(@"check file: %@ length=%lu", path, data.length); } +#pragma mark - Photo File Input Stream + +- (void)testMediaStreamOpensStreamForPhotoFile +{ + // Generate a test image at a temporary path. + NSURL *imageFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"test-image.jpeg"]]; + [[NSFileManager defaultManager] removeItemAtURL:imageFileURL error:nil]; + UIImage *generatedTestImage = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1920, 1080)); + NSData *imageData = UIImageJPEGRepresentation(generatedTestImage, 1.0); + [imageData writeToURL:imageFileURL atomically:NO]; + expect([[NSFileManager defaultManager] fileExistsAtPath:imageFileURL.path]).to.beTruthy(); + + // Try opening a stream for direct photo streaming. + ATLMediaInputStream *streamDirect = [ATLMediaInputStream mediaInputStreamWithFileURL:imageFileURL]; + expect(streamDirect.isLossless).to.beTruthy(); + expect(streamDirect.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(streamDirect.streamError).to.beNil(); + [streamDirect open]; + expect(streamDirect.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamDirect.streamError).to.beNil(); + + // Try opening a stream for resampled photo streaming. 
+ ATLMediaInputStream *streamResample = [ATLMediaInputStream mediaInputStreamWithFileURL:imageFileURL]; + streamResample.maximumSize = 512; + streamResample.compressionQuality = 0.5f; + expect(streamResample.isLossless).to.beFalsy(); + expect(streamResample.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(streamResample.streamError).to.beNil(); + [streamResample open]; + expect(streamResample.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamResample.streamError).to.beNil(); +} + +- (void)testMediaStreamClosesStreamForPhotoFile +{ + // Generate a test image at a temporary path. + NSURL *imageFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"test-image.jpeg"]]; + [[NSFileManager defaultManager] removeItemAtURL:imageFileURL error:nil]; + UIImage *generatedTestImage = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1920, 1080)); + NSData *imageData = UIImageJPEGRepresentation(generatedTestImage, 1.0); + [imageData writeToURL:imageFileURL atomically:NO]; + expect([[NSFileManager defaultManager] fileExistsAtPath:imageFileURL.path]).to.beTruthy(); + + // Try opening and then closing the stream for direct photo streaming. + ATLMediaInputStream *streamDirect = [ATLMediaInputStream mediaInputStreamWithFileURL:imageFileURL]; + expect(streamDirect.isLossless).to.beTruthy(); + expect(streamDirect.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(streamDirect.streamError).to.beNil(); + [streamDirect open]; + expect(streamDirect.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamDirect.streamError).to.beNil(); + [streamDirect close]; + expect(streamDirect.streamStatus).to.equal(NSStreamStatusClosed); + expect(streamDirect.streamError).to.beNil(); + + // Try opening and then closing the stream for resampled photo streaming. 
+ ATLMediaInputStream *streamResample = [ATLMediaInputStream mediaInputStreamWithFileURL:imageFileURL]; + streamResample.maximumSize = 512; + streamResample.compressionQuality = 0.5f; + expect(streamResample.isLossless).to.beFalsy(); + expect(streamResample.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(streamResample.streamError).to.beNil(); + [streamResample open]; + expect(streamResample.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamResample.streamError).to.beNil(); + [streamResample close]; + expect(streamResample.streamStatus).to.equal(NSStreamStatusClosed); + expect(streamResample.streamError).to.beNil(); +} + +- (void)testMediaStreamStreamsPhotoFileDirectly +{ + // Generate a test image at a temporary path. + NSURL *imageFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"test-image.jpeg"]]; + [[NSFileManager defaultManager] removeItemAtURL:imageFileURL error:nil]; + UIImage *generatedTestImage = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1920, 1080)); + NSData *imageData = UIImageJPEGRepresentation(generatedTestImage, 1.0); + [imageData writeToURL:imageFileURL atomically:NO]; + expect([[NSFileManager defaultManager] fileExistsAtPath:imageFileURL.path]).to.beTruthy(); + + // Stream the content of the photo file. 
+ ATLMediaInputStream *stream = [ATLMediaInputStream mediaInputStreamWithFileURL:imageFileURL]; + [stream open]; + expect(stream.streamStatus).to.equal(NSStreamStatusOpen); + expect(stream.isLossless).to.beTruthy(); + expect(stream.streamError).to.beNil(); + + NSMutableData *data = [NSMutableData data]; + NSUInteger size = 512 * 1024; + uint8_t *buffer = malloc(size); + NSInteger bytesRead = 0; + do { + bytesRead = [stream read:buffer maxLength:size]; + expect(stream.streamError).to.beNil(); + [data appendBytes:buffer length:bytesRead]; + } while (bytesRead > 0); + free(buffer); + + expect(stream.streamStatus).to.equal(NSStreamStatusAtEnd); + [stream close]; + expect(stream.streamStatus).to.equal(NSStreamStatusClosed); + expect(stream.streamError).to.beNil(); + + NSString *path = [NSString stringWithFormat:@"%@test.jpeg", NSTemporaryDirectory()]; + [data writeToFile:path atomically:NO]; + + // Check the streamed image properties + UIImage *streamedImage = [UIImage imageWithData:data]; + expect(streamedImage.size.width).to.equal(1920); + expect(streamedImage.size.height).to.equal(1080); +} + +- (void)testMediaStreamStreamsPhotoFileWithResampling +{ + // Generate a test image at a temporary path. + NSURL *imageFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"test-image.jpeg"]]; + [[NSFileManager defaultManager] removeItemAtURL:imageFileURL error:nil]; + UIImage *generatedTestImage = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1920, 1080)); + NSData *imageData = UIImageJPEGRepresentation(generatedTestImage, 1.0); + [imageData writeToURL:imageFileURL atomically:NO]; + expect([[NSFileManager defaultManager] fileExistsAtPath:imageFileURL.path]).to.beTruthy(); + + // Stream the content of the photo file. 
+ ATLMediaInputStream *stream = [ATLMediaInputStream mediaInputStreamWithFileURL:imageFileURL]; + stream.maximumSize = 512; + stream.compressionQuality = 0.5f; + expect(stream.isLossless).to.beFalsy(); + [stream open]; + expect(stream.streamStatus).to.equal(NSStreamStatusOpen); + expect(stream.streamError).to.beNil(); + + NSMutableData *data = [NSMutableData data]; + NSUInteger size = 512 * 1024; + uint8_t *buffer = malloc(size); + NSInteger bytesRead = 0; + do { + bytesRead = [stream read:buffer maxLength:size]; + expect(stream.streamError).to.beNil(); + [data appendBytes:buffer length:bytesRead]; + } while (bytesRead > 0); + free(buffer); + + expect(stream.streamStatus).to.equal(NSStreamStatusAtEnd); + [stream close]; + expect(stream.streamStatus).to.equal(NSStreamStatusClosed); + expect(stream.streamError).to.beNil(); + + NSString *path = [NSString stringWithFormat:@"%@test.jpeg", NSTemporaryDirectory()]; + [data writeToFile:path atomically:NO]; + + // Check the streamed image properties + UIImage *streamedImage = [UIImage imageWithData:data]; + expect(streamedImage.size.width).to.equal(512); + expect(streamedImage.size.height).to.equal(((float)512/1920) * 1080); +} + +#pragma mark - Video Input Stream + +- (void)testMakeVideoWritesToDesiredPath +{ + NSURL *outputFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"temporary-test-video.mov"]]; + ATLTestMakeVideo(outputFileURL, CGSizeMake(1280, 720), 30, 2); + expect([[NSFileManager defaultManager] fileExistsAtPath:outputFileURL.path]).to.beTruthy(); +} + +- (void)testInputMediaStreamForVideo +{ + NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, 1, YES); + NSString *documentsDirectory = [paths objectAtIndex:0]; + int count; + ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; + ALAsset *VideoSourceAsset = ATLVideoAssetTestObtainLastVideoFromAssetLibrary(library); + expect(VideoSourceAsset).toNot.beNil(); + + NSURL *LastVideoURL = 
VideoSourceAsset.defaultRepresentation.url; + //Video Length + ALAssetRepresentation *rep = [VideoSourceAsset defaultRepresentation]; + Byte *buffer1 = (Byte*)malloc(rep.size); + NSError *error = nil; + NSUInteger buffered = [rep getBytes:buffer1 fromOffset:0.0 length:rep.size error:&error]; + NSData *data1 = [NSData dataWithBytesNoCopy:buffer1 length:buffered freeWhenDone:YES]; + NSLog(@"Size of video %lu",(unsigned long)data1.length); + + ATLMediaInputStream *stream = [ATLMediaInputStream mediaInputStreamWithAssetURL:LastVideoURL]; + + [stream open]; + expect(stream.streamStatus).to.equal(NSStreamStatusOpen); + expect(stream.streamError).to.beNil(); + + NSMutableData *data = [NSMutableData data]; + NSUInteger sizeOfBuffer = 512 * 1024; + NSUInteger sizeOfRead = 512*1024; + uint8_t *buffer = malloc(sizeOfBuffer); + NSInteger bytesRead = 0; + do { + bytesRead = [stream read:buffer maxLength:sizeOfRead]; + expect(stream.streamError).to.beNil(); + [data appendBytes:buffer length:bytesRead]; + } while (bytesRead > 0); + free(buffer); + + expect(stream.streamStatus).to.equal(NSStreamStatusAtEnd); + [stream close]; + expect(stream.streamStatus).to.equal(NSStreamStatusClosed); + expect(stream.streamError).to.beNil(); + + NSString *path = [NSString stringWithFormat:@"%@test.mp4", NSTemporaryDirectory()]; + [data writeToFile:path atomically:NO]; + NSLog(@"check file: %@ length=%lu", path, data.length); + + NSArray *directoryContent = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectory error:NULL]; + for (count = 0; count < (int)[directoryContent count]; count++) { + NSLog(@"File %d: %@", (count + 1), [directoryContent objectAtIndex:count]); + } + expect([directoryContent count]).to.equal(0); +} + +-(void)testVideoStreamOpenStream +{ + ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; + ALAsset *sourceAsset = ATLVideoAssetTestObtainLastVideoFromAssetLibrary(library); + expect(sourceAsset).toNot.beNil(); + + NSURL *lastVideoURL = 
sourceAsset.defaultRepresentation.url; + + ATLMediaInputStream *streamDirect = [ATLMediaInputStream mediaInputStreamWithAssetURL:lastVideoURL]; + expect(streamDirect.isLossless).to.beTruthy(); + expect(streamDirect.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(streamDirect.streamError).to.beNil(); + [streamDirect open]; + expect(streamDirect.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamDirect.streamError).to.beNil(); + + ATLMediaInputStream *streamResample = [ATLMediaInputStream mediaInputStreamWithAssetURL:lastVideoURL]; + streamResample.maximumSize = 512; + streamResample.compressionQuality = 0.5f; + expect(streamResample.isLossless).to.beFalsy(); + expect(streamResample.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(streamResample.streamError).to.beNil(); + [streamResample open]; + expect(streamResample.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamResample.streamError).to.beNil(); +} + +-(void)testVideoStreamCloseStream +{ + ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; + ALAsset *sourceAsset = ATLVideoAssetTestObtainLastVideoFromAssetLibrary(library); + expect(sourceAsset).toNot.beNil(); + + NSURL *lastVideoURL = sourceAsset.defaultRepresentation.url; + + ATLMediaInputStream *streamDirect = [ATLMediaInputStream mediaInputStreamWithAssetURL:lastVideoURL]; + expect(streamDirect.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(streamDirect.streamError).to.beNil(); + [streamDirect open]; + expect(streamDirect.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamDirect.streamError).to.beNil(); + [streamDirect close]; + expect(streamDirect.streamStatus).to.equal(NSStreamStatusClosed); + expect(streamDirect.streamError).to.beNil(); + + ATLMediaInputStream *streamResample = [ATLMediaInputStream mediaInputStreamWithAssetURL:lastVideoURL]; + streamResample.maximumSize = 512; + streamResample.compressionQuality = 0.5f; + expect(streamResample.streamStatus).to.equal(NSStreamStatusNotOpen); + 
expect(streamResample.streamError).to.beNil(); + [streamResample open]; + expect(streamResample.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamResample.streamError).to.beNil(); + [streamResample close]; + expect(streamResample.streamStatus).to.equal(NSStreamStatusClosed); + expect(streamResample.streamError).to.beNil(); +} + +- (void)testVideoStreamReadsStreamRenameMe +{ + ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; + ALAsset *sourceAsset = ATLVideoAssetTestObtainLastVideoFromAssetLibrary(library); + expect(sourceAsset).toNot.beNil(); + + NSURL *lastVideoURL = sourceAsset.defaultRepresentation.url; + + ATLMediaInputStream *stream = [ATLMediaInputStream mediaInputStreamWithAssetURL:lastVideoURL]; + [stream open]; + expect(stream.streamStatus).to.equal(NSStreamStatusOpen); + expect(stream.streamError).to.beNil(); + + NSMutableData *data = [NSMutableData data]; + NSUInteger size = 512 * 1024; + uint8_t *buffer = malloc(size); + NSInteger bytesRead = 0; + do { + bytesRead = [stream read:buffer maxLength:size]; + expect(stream.streamError).to.beNil(); + [data appendBytes:buffer length:bytesRead]; + } while (bytesRead > 0); + free(buffer); + + expect(stream.streamStatus).to.equal(NSStreamStatusAtEnd); + [stream close]; + expect(stream.streamStatus).to.equal(NSStreamStatusClosed); + expect(stream.streamError).to.beNil(); + + NSString *path = [NSString stringWithFormat:@"%@test.mp4", NSTemporaryDirectory()]; + [data writeToFile:path atomically:NO]; + NSLog(@"check file: %@ length=%lu", path, data.length); +} + +-(void)testTempVideoFilesCleaned +{ + NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, 1, YES); + NSString *documentsDirectory = [paths objectAtIndex:0]; + + ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; + ALAsset *sourceAsset = ATLVideoAssetTestObtainLastVideoFromAssetLibrary(library); + expect(sourceAsset).toNot.beNil(); + + NSURL *lastVideoURL = sourceAsset.defaultRepresentation.url; + + 
ATLMediaInputStream *stream = [ATLMediaInputStream mediaInputStreamWithAssetURL:lastVideoURL];
+    [stream open];
+    expect(stream.streamStatus).to.equal(NSStreamStatusOpen);
+    expect(stream.streamError).to.beNil();
+
+    NSMutableData *data = [NSMutableData data];
+    NSUInteger size = 512 * 1024;
+    uint8_t *buffer = malloc(size);
+    NSInteger bytesRead = 0;
+    do {
+        bytesRead = [stream read:buffer maxLength:size];
+        expect(stream.streamError).to.beNil();
+        [data appendBytes:buffer length:bytesRead];
+    } while (bytesRead > 0);
+    free(buffer);
+    expect(stream.streamStatus).to.equal(NSStreamStatusAtEnd);
+    [stream close];
+
+    int count;
+
+    NSArray *directoryContent = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectory error:NULL];
+    for (count = 0; count < (int)[directoryContent count]; count++) {
+        NSLog(@"File %d: %@", (count + 1), [directoryContent objectAtIndex:count]);
+    }
+    expect([directoryContent count]).to.equal(0);
+}
+
+- (void)testMediaStreamOpensStreamForVideoFile
+{
+    // Generate a test video at a temporary path.
+    NSURL *videoFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"temporary-test-video.mov"]];
+    ATLTestMakeVideo(videoFileURL, CGSizeMake(1280, 720), 30, 2);
+    expect([[NSFileManager defaultManager] fileExistsAtPath:videoFileURL.path]).to.beTruthy();
+
+
+    // Try opening a stream for direct video streaming.
+    ATLMediaInputStream *streamDirect = [ATLMediaInputStream mediaInputStreamWithFileURL:videoFileURL];
+    expect(streamDirect.isLossless).to.beTruthy();
+    expect(streamDirect.streamStatus).to.equal(NSStreamStatusNotOpen);
+    expect(streamDirect.streamError).to.beNil();
+    [streamDirect open];
+    expect(streamDirect.streamStatus).to.equal(NSStreamStatusOpen);
+    expect(streamDirect.streamError).to.beNil();
+
+    // Try opening a stream for resampled video streaming. 
+    ATLMediaInputStream *streamResample = [ATLMediaInputStream mediaInputStreamWithFileURL:videoFileURL];
+    streamResample.maximumSize = 512;
+    streamResample.compressionQuality = 0.5f;
+    expect(streamResample.isLossless).to.beFalsy();
+    expect(streamResample.streamStatus).to.equal(NSStreamStatusNotOpen);
+    expect(streamResample.streamError).to.beNil();
+    [streamResample open];
+    expect(streamResample.streamStatus).to.equal(NSStreamStatusOpen);
+    expect(streamResample.streamError).to.beNil();
+}
+
+- (void)testMediaStreamClosesStreamForVideoFile
+{
+    // Generate a test video at a temporary path.
+    NSURL *videoFileURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"temporary-test-video.mov"]];
+    ATLTestMakeVideo(videoFileURL, CGSizeMake(1280, 720), 30, 2);
+    expect([[NSFileManager defaultManager] fileExistsAtPath:videoFileURL.path]).to.beTruthy();
+
+    // Try opening and then closing the stream for direct video streaming.
+    ATLMediaInputStream *streamDirect = [ATLMediaInputStream mediaInputStreamWithFileURL:videoFileURL];
+    expect(streamDirect.isLossless).to.beTruthy();
+    expect(streamDirect.streamStatus).to.equal(NSStreamStatusNotOpen);
+    expect(streamDirect.streamError).to.beNil();
+    [streamDirect open];
+    expect(streamDirect.streamStatus).to.equal(NSStreamStatusOpen);
+    expect(streamDirect.streamError).to.beNil();
+    [streamDirect close];
+    expect(streamDirect.streamStatus).to.equal(NSStreamStatusClosed);
+    expect(streamDirect.streamError).to.beNil();
+
+    // Try opening and then closing the stream for resampled video streaming. 
+ ATLMediaInputStream *streamResample = [ATLMediaInputStream mediaInputStreamWithFileURL:videoFileURL]; + streamResample.maximumSize = 512; + streamResample.compressionQuality = 0.5f; + expect(streamResample.isLossless).to.beFalsy(); + expect(streamResample.streamStatus).to.equal(NSStreamStatusNotOpen); + expect(streamResample.streamError).to.beNil(); + [streamResample open]; + expect(streamResample.streamStatus).to.equal(NSStreamStatusOpen); + expect(streamResample.streamError).to.beNil(); + [streamResample close]; + expect(streamResample.streamStatus).to.equal(NSStreamStatusClosed); + expect(streamResample.streamError).to.beNil(); +} + +#pragma mark - General Behavior + +- (void)testMediaStreamDesignatedInitFails +{ + expect(^{ + __unused ATLMediaInputStream *streamDirect = [[ATLMediaInputStream alloc] init]; + }).to.raise(NSInternalInconsistencyException); +} + @end diff --git a/Tests/ATLMessageCollectionViewCellTest.m b/Tests/ATLMessageCollectionViewCellTest.m index fe939e58a..9c2f11d67 100644 --- a/Tests/ATLMessageCollectionViewCellTest.m +++ b/Tests/ATLMessageCollectionViewCellTest.m @@ -46,7 +46,7 @@ - (void)setUp LYRClientMock *layerClient = [LYRClientMock layerClientMockWithAuthenticatedUserID:mockUser.participantIdentifier]; self.testInterface = [ATLTestInterface testIntefaceWithLayerClient:layerClient]; [self setRootViewController]; - + [self resetAppearance]; } - (void)tearDown @@ -55,7 +55,6 @@ - (void)tearDown self.conversation = nil; self.controller = nil; [[LYRMockContentStore sharedStore] resetContentStore]; - [self resetAppearance]; [super tearDown]; } @@ -92,7 +91,18 @@ - (void)testToVerifyMessageBubbleViewWithImage ATLMessageCollectionViewCell *cell = [ATLMessageCollectionViewCell new]; [cell presentMessage:(LYRMessage *)message]; - expect(cell.bubbleView.bubbleImageView.image).toNot.beNil; + expect(cell.bubbleView.bubbleImageView.image).willNot.beNil; + expect(cell.bubbleView.bubbleViewLabel.text).to.beNil; +} + +- 
(void)testToVerifyMessageBubbleViewWithGIF +{ + LYRMessagePartMock *imagePart = ATLMessagePartWithGIFImage([UIImage new]); + LYRMessageMock *message = [self.testInterface.layerClient newMessageWithParts:@[imagePart] options:nil error:nil]; + + ATLMessageCollectionViewCell *cell = [ATLMessageCollectionViewCell new]; + [cell presentMessage:(LYRMessage *)message]; + expect(cell.bubbleView.bubbleImageView.image).willNot.beNil; expect(cell.bubbleView.bubbleViewLabel.text).to.beNil; } @@ -109,6 +119,66 @@ - (void)testToVerifyMessageBubbleViewWithLocation expect(cell.bubbleView.bubbleViewLabel.text).to.beNil; } +- (void)testToVerifyTextCheckingTypeLink +{ + NSString *link = @"www.layer.com"; + NSString *phoneNumber = @"734-769-6526"; + NSString *linkAndPhoneNumber = [NSString stringWithFormat:@"%@ and %@", link, phoneNumber]; + LYRMessagePartMock *part = [LYRMessagePartMock messagePartWithText:linkAndPhoneNumber]; LYRMessageMock *message = [self.testInterface.layerClient newMessageWithParts:@[part] options:nil error:nil]; + + ATLMessageCollectionViewCell *cell = [ATLMessageCollectionViewCell new]; + [cell presentMessage:(LYRMessage *)message]; + + NSRange linkRange = [linkAndPhoneNumber rangeOfString:link]; + NSDictionary *linkAttributes = [cell.bubbleView.bubbleViewLabel.attributedText attributesAtIndex:linkRange.location effectiveRange:&linkRange]; + expect(linkAttributes[NSUnderlineStyleAttributeName]).to.equal(NSUnderlineStyleSingle); + + NSRange phoneNumberRange = [linkAndPhoneNumber rangeOfString:phoneNumber]; + NSDictionary *phoneNumberAttributes = [cell.bubbleView.bubbleViewLabel.attributedText attributesAtIndex:phoneNumberRange.location effectiveRange:&phoneNumberRange]; + expect(phoneNumberAttributes[NSUnderlineStyleAttributeName]).toNot.equal(NSUnderlineStyleSingle); +} + +- (void)testToVerifyTextCheckingTypePhoneNumber +{ + NSString *link = @"www.layer.com"; + NSString *phoneNumber = @"734-769-6526"; + NSString *linkAndPhoneNumber = [NSString 
stringWithFormat:@"%@ and %@", link, phoneNumber]; + LYRMessagePartMock *part = [LYRMessagePartMock messagePartWithText:linkAndPhoneNumber]; LYRMessageMock *message = [self.testInterface.layerClient newMessageWithParts:@[part] options:nil error:nil]; + + ATLMessageCollectionViewCell *cell = [ATLMessageCollectionViewCell new]; + cell.messageTextCheckingTypes = NSTextCheckingTypePhoneNumber; + [cell presentMessage:(LYRMessage *)message]; + + NSRange linkRange = [linkAndPhoneNumber rangeOfString:link]; + NSDictionary *linkAttributes = [cell.bubbleView.bubbleViewLabel.attributedText attributesAtIndex:linkRange.location effectiveRange:&linkRange]; + expect(linkAttributes[NSUnderlineStyleAttributeName]).toNot.equal(NSUnderlineStyleSingle); + + NSRange phoneNumberRange = [linkAndPhoneNumber rangeOfString:phoneNumber]; + NSDictionary *phoneNumberAttributes = [cell.bubbleView.bubbleViewLabel.attributedText attributesAtIndex:phoneNumberRange.location effectiveRange:&phoneNumberRange]; + expect(phoneNumberAttributes[NSUnderlineStyleAttributeName]).to.equal(NSUnderlineStyleSingle); +} + +- (void)testToVerifytextCheckingTypeLinkAndPhoneNumber +{ + NSString *link = @"www.layer.com"; + NSString *phoneNumber = @"734-769-6526"; + NSString *linkAndPhoneNumber = [NSString stringWithFormat:@"%@ and %@", link, phoneNumber]; + LYRMessagePartMock *part = [LYRMessagePartMock messagePartWithText:linkAndPhoneNumber]; + LYRMessageMock *message = [self.testInterface.layerClient newMessageWithParts:@[part] options:nil error:nil]; + + ATLMessageCollectionViewCell *cell = [ATLMessageCollectionViewCell new]; + cell.messageTextCheckingTypes = NSTextCheckingTypeLink | NSTextCheckingTypePhoneNumber; + [cell presentMessage:(LYRMessage *)message]; + + NSRange linkRange = [linkAndPhoneNumber rangeOfString:link]; + NSDictionary *linkAttributes = [cell.bubbleView.bubbleViewLabel.attributedText attributesAtIndex:linkRange.location effectiveRange:&linkRange]; + 
expect(linkAttributes[NSUnderlineStyleAttributeName]).to.equal(NSUnderlineStyleSingle); + + NSRange phoneNumberRange = [linkAndPhoneNumber rangeOfString:phoneNumber]; + NSDictionary *phoneNumberAttributes = [cell.bubbleView.bubbleViewLabel.attributedText attributesAtIndex:phoneNumberRange.location effectiveRange:&phoneNumberRange]; + expect(phoneNumberAttributes[NSUnderlineStyleAttributeName]).to.equal(NSUnderlineStyleSingle); +} + #pragma mark - Outgoing Customization - (void)testToVerifyOutgoingCustomMessageTextFont @@ -271,6 +341,7 @@ - (void)createIncomingMesssageWithText:(NSString *)text LYRMessagePartMock *part = [LYRMessagePartMock messagePartWithText:text]; LYRMessageMock *message = [layerClient newMessageWithParts:@[part] options:nil error:nil]; [self.conversation sendMessage:message error:nil]; + [tester waitForAnimationsToFinish]; } - (void)setRootViewController diff --git a/Tests/ATLMessageInputBarTest.m b/Tests/ATLMessageInputBarTest.m index 534426132..85a55d959 100644 --- a/Tests/ATLMessageInputBarTest.m +++ b/Tests/ATLMessageInputBarTest.m @@ -23,6 +23,8 @@ #import "ATLTestInterface.h" #import "ATLSampleConversationViewController.h" #import "ATLMediaAttachment.h" +#import "ATLConstants.h" +#import "ATLTestUtilities.h" @interface ATLConversationViewController () @@ -34,6 +36,7 @@ @interface ATLMessageInputBarTest :XCTestCase @property (nonatomic) ATLTestInterface *testInterface; @property (nonatomic) ATLSampleConversationViewController *viewController; +@property (nonatomic) LYRConversation *conversation; @end @@ -55,11 +58,7 @@ - (void)setUp self.testInterface = [ATLTestInterface testIntefaceWithLayerClient:layerClient]; ATLUserMock *mockUser1 = [ATLUserMock userWithMockUserName:ATLMockUserNameKlemen]; - LYRConversationMock *conversation1 = [self.testInterface conversationWithParticipants:[NSSet setWithObject:mockUser1.participantIdentifier] lastMessageText:nil]; - - self.viewController = [ATLSampleConversationViewController 
conversationViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; - self.viewController.conversation = (LYRConversation *)conversation1; - [self setRootViewController:self.viewController]; + self.conversation = (LYRConversation *)[self.testInterface conversationWithParticipants:[NSSet setWithObject:mockUser1.participantIdentifier] lastMessageText:nil]; } - (void)tearDown @@ -72,6 +71,7 @@ - (void)tearDown - (void)testToVerifyMessageInputToolbarUI { + [self setRootViewController]; [tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarCameraButton]; [tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; [tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarLocationButton]; @@ -80,6 +80,7 @@ - (void)testToVerifyMessageInputToolbarUI - (void)testToVerifyToVerifyTextChangesLocationButtonToSendButton { + [self setRootViewController]; [tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarAccessibilityLabel]; [tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarLocationButton]; [tester enterText:@"A" intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; @@ -94,6 +95,7 @@ - (void)testToVerifyToVerifyTextChangesLocationButtonToSendButton - (void)testToVerifyRightAccessoryButtonDelegateFunctionality { + [self setRootViewController]; ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarAccessibilityLabel]; id delegateMock = OCMProtocolMock(@protocol(ATLMessageInputToolbarDelegate)); toolBar.inputToolBarDelegate = delegateMock; @@ -110,6 +112,7 @@ - (void)testToVerifyRightAccessoryButtonDelegateFunctionality - (void)testToVerifyLeftAccessoryButtonDelegateFunctionality { + [self setRootViewController]; ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarAccessibilityLabel]; id delegateMock = 
OCMProtocolMock(@protocol(ATLMessageInputToolbarDelegate)); toolBar.inputToolBarDelegate = delegateMock; @@ -126,6 +129,7 @@ - (void)testToVerifyLeftAccessoryButtonDelegateFunctionality - (void)testToVerifyMessageEnteredIsConsitentWithMessageToBeSent { + [self setRootViewController]; ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarAccessibilityLabel]; id delegateMock = OCMProtocolMock(@protocol(ATLMessageInputToolbarDelegate)); toolBar.inputToolBarDelegate = delegateMock; @@ -148,6 +152,7 @@ - (void)testToVerifyMessageEnteredIsConsitentWithMessageToBeSent - (void)testToVerifyButtonEnablement { + [self setRootViewController]; ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarAccessibilityLabel]; expect(toolBar.rightAccessoryButton.highlighted).to.beTruthy; expect(toolBar.rightAccessoryButton.enabled).to.beTruthy; @@ -158,6 +163,7 @@ - (void)testToVerifyButtonEnablement - (void)testToVerifyTextEnterendDoesNotEnableButtons { + [self setRootViewController]; self.viewController.conversation = nil; ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:ATLMessageInputToolbarAccessibilityLabel]; @@ -174,6 +180,7 @@ - (void)testToVerifyTextEnterendDoesNotEnableButtons - (void)testToVerifySendingMessageWithPhoto { + [self setRootViewController]; ATLMessageInputToolbar *toolBar = self.viewController.messageInputToolbar; id delegateMock = OCMProtocolMock(@protocol(ATLMessageInputToolbarDelegate)); toolBar.inputToolBarDelegate = delegateMock; @@ -192,15 +199,16 @@ - (void)testToVerifySendingMessageWithPhoto [tester enterText:testText intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; - UIImage *image = [UIImage imageNamed:@"test-logo"]; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); ATLMediaAttachment *imageAttachment = 
[ATLMediaAttachment mediaAttachmentWithImage:image metadata:nil thumbnailSize:100]; - [toolBar insertMediaAttachment:imageAttachment]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; [delegateMock verify]; } - (void)testToVerifySending1LineOfTextWith2Photos { + [self setRootViewController]; ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; id delegateMock = OCMProtocolMock(@protocol(ATLMessageInputToolbarDelegate)); toolBar.inputToolBarDelegate = delegateMock; @@ -215,16 +223,17 @@ - (void)testToVerifySending1LineOfTextWith2Photos }] messageInputToolbar:toolBar didTapRightAccessoryButton:[OCMArg any]]; [tester enterText:testText intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; - UIImage *image = [UIImage imageNamed:@"test-logo"]; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); ATLMediaAttachment *imageAttachment = [ATLMediaAttachment mediaAttachmentWithImage:image metadata:nil thumbnailSize:100]; - [toolBar insertMediaAttachment:imageAttachment]; - [toolBar insertMediaAttachment:imageAttachment]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; [delegateMock verify]; } - (void)testToVerifySending5Photos { + [self setRootViewController]; ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; id delegateMock = OCMProtocolMock(@protocol(ATLMessageInputToolbarDelegate)); toolBar.inputToolBarDelegate = delegateMock; @@ -239,63 +248,88 @@ - (void)testToVerifySending5Photos expect([parts objectAtIndex:4]).to.beKindOf([ATLMediaAttachment class]); }] messageInputToolbar:toolBar didTapRightAccessoryButton:[OCMArg 
any]]; - UIImage *image = [UIImage imageNamed:@"test-logo"]; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); ATLMediaAttachment *imageAttachment = [ATLMediaAttachment mediaAttachmentWithImage:image metadata:nil thumbnailSize:100]; - [toolBar insertMediaAttachment:imageAttachment]; - [toolBar insertMediaAttachment:imageAttachment]; - [toolBar insertMediaAttachment:imageAttachment]; - [toolBar insertMediaAttachment:imageAttachment]; - [toolBar insertMediaAttachment:imageAttachment]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarSendButton]; [delegateMock verify]; } -- (void)testToVerifyHeightOfInputBarIsCapped +- (void)testToVerifySelectingAndRemovingAnImageKeepsFontConsistent { + [self setRootViewController]; ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; - CGFloat toolbarHeight = toolBar.frame.size.height; - CGFloat toolbarNewHeight; - toolBar.maxNumberOfLines = 3; - - [tester tapViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; - [tester enterText:@"" intoViewWithAccessibilityLabel:ATLMessageInputToolbarAccessibilityLabel]; - [tester tapViewWithAccessibilityLabel:@"RETURN"]; - toolbarNewHeight = toolBar.frame.size.height; - expect(toolbarNewHeight).to.beGreaterThan(toolbarHeight); - toolbarHeight = toolBar.frame.size.height; + UIFont *font = toolBar.textInputView.font; - [tester tapViewWithAccessibilityLabel:@"RETURN"]; - toolbarNewHeight = toolBar.frame.size.height; - expect(toolbarNewHeight).to.beGreaterThan(toolbarHeight); - toolbarHeight = toolBar.frame.size.height; + 
UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); + ATLMediaAttachment *imageAttachment = [ATLMediaAttachment mediaAttachmentWithImage:image metadata:nil thumbnailSize:100]; + [toolBar insertMediaAttachment:imageAttachment withEndLineBreak:YES]; + + [tester clearTextFromViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; + expect(font).to.equal(toolBar.textInputView.font); +} + +- (void)testToVerifyRightAcccessoryButtonEnablementWithImage +{ + [self setRootViewController]; + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); + toolBar.rightAccessoryImage = image; + expect(toolBar.rightAccessoryButton.imageView.image).to.equal(image); + expect(toolBar.rightAccessoryButton.enabled).to.beTruthy(); +} + +- (void)testToVerifyRightAcccessoryButtonEnablementWithoutImage +{ + [self setRootViewController]; + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + toolBar.displaysRightAccessoryImage = NO; - [tester tapViewWithAccessibilityLabel:@"RETURN"]; - toolbarNewHeight = toolBar.frame.size.height; - expect(toolbarNewHeight).to.equal(toolbarHeight); - toolbarHeight = toolBar.frame.size.height; + [tester enterText:@"test" intoViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; + expect(toolBar.rightAccessoryButton.enabled).to.beTruthy(); - [tester tapViewWithAccessibilityLabel:@"RETURN"]; - toolbarNewHeight = toolBar.frame.size.height; - expect(toolbarNewHeight).to.equal(toolbarHeight); + [tester clearTextFromViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; + expect(toolBar.rightAccessoryButton.enabled).to.beFalsy(); } -- (void)testToVerifySelectingAndRemovingAnImageKeepsFontConsistent +- (void)testToVerifyCustomAccessoryButtonImages { + [self setRootViewController]; 
ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; - UIFont *font = toolBar.textInputView.font; + UIImage *image = ATLTestAttachmentMakeImageWithSize(CGSizeMake(1024, 512)); + toolBar.rightAccessoryImage = image; + toolBar.leftAccessoryImage = image; - UIImage *image = [UIImage imageNamed:@"test-logo"]; - ATLMediaAttachment *imageAttachment = [ATLMediaAttachment mediaAttachmentWithImage:image metadata:nil thumbnailSize:100]; - [toolBar insertMediaAttachment:imageAttachment]; + expect(toolBar.rightAccessoryButton.imageView.image).to.equal(image); + expect(toolBar.leftAccessoryButton.imageView.image).to.equal(image); +} - [tester clearTextFromViewWithAccessibilityLabel:ATLMessageInputToolbarTextInputView]; - expect(font).to.equal(toolBar.textInputView.font); +- (void)testToVerifyRightAccessoryButtonColor +{ + [self setRootViewController]; + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + expect([toolBar.rightAccessoryButton titleColorForState:UIControlStateNormal]).to.equal(ATLBlueColor()); + expect([toolBar.rightAccessoryButton titleColorForState:UIControlStateDisabled]).to.equal([UIColor grayColor]); +} + +- (void)testToVerifyDefaultFontForComposerTextInputView +{ + [self setRootViewController]; + ATLMessageInputToolbar *toolBar = (ATLMessageInputToolbar *)[tester waitForViewWithAccessibilityLabel:@"Message Input Toolbar"]; + expect(toolBar.textInputView.font).to.equal([UIFont systemFontOfSize:17]); } -- (void)setRootViewController:(UIViewController *)controller +- (void)setRootViewController { - [self.testInterface presentViewController:controller]; + self.viewController = [ATLSampleConversationViewController conversationViewControllerWithLayerClient:(LYRClient *)self.testInterface.layerClient]; + self.viewController.conversation = self.conversation; + + [self.testInterface 
presentViewController:self.viewController]; [tester waitForTimeInterval:1]; } diff --git a/Tests/ATLTestInterface.h b/Tests/ATLTestInterface.h index e7d88c220..16554891a 100644 --- a/Tests/ATLTestInterface.h +++ b/Tests/ATLTestInterface.h @@ -37,6 +37,8 @@ LYRMessagePartMock *ATLMessagePartWithText(NSString *text); LYRMessagePartMock *ATLMessagePartWithJPEGImage(UIImage *image); +LYRMessagePartMock *ATLMessagePartWithGIFImage(UIImage *image); + LYRMessagePartMock *ATLMessagePartForImageSize(UIImage *image); LYRMessagePartMock *ATLMessagePartWithLocation(CLLocation *location); diff --git a/Tests/ATLTestInterface.m b/Tests/ATLTestInterface.m index 959d1bacd..c90a8e31c 100644 --- a/Tests/ATLTestInterface.m +++ b/Tests/ATLTestInterface.m @@ -35,6 +35,12 @@ data:UIImageJPEGRepresentation(image, 0.1)]; } +LYRMessagePartMock *ATLMessagePartWithGIFImage(UIImage *image) +{ + return [LYRMessagePartMock messagePartWithMIMEType:ATLMIMETypeImageGIF + data:UIImageJPEGRepresentation(image, 0.1)]; +} + LYRMessagePartMock *ATLMessagePartForImageSize(UIImage *image) { CGSize size = ATLImageSize(image); @@ -98,9 +104,9 @@ - (NSString *)conversationLabelForConversation:(LYRConversationMock *)conversati // Put the latest message sender's name first ATLUserMock *firstUser; - if (![conversation.lastMessage.sentByUserID isEqualToString:self.layerClient.authenticatedUserID]) { + if (![conversation.lastMessage.sender.userID isEqualToString:self.layerClient.authenticatedUserID]) { if (conversation.lastMessage) { - NSPredicate *searchPredicate = [NSPredicate predicateWithFormat:@"SELF.participantIdentifier IN %@", conversation.lastMessage.sentByUserID]; + NSPredicate *searchPredicate = [NSPredicate predicateWithFormat:@"SELF.participantIdentifier IN %@", conversation.lastMessage.sender.userID]; ATLUserMock *lastMessageSender = [[[participants filteredSetUsingPredicate:searchPredicate] allObjects] lastObject]; if (lastMessageSender) { firstUser = lastMessageSender; diff --git 
a/Tests/ATLTestUtilities.h b/Tests/ATLTestUtilities.h new file mode 100644 index 000000000..2b3c6a39e --- /dev/null +++ b/Tests/ATLTestUtilities.h @@ -0,0 +1,64 @@ +// +// ATLTestUtilities.h +// Atlas +// +// Created by Klemen Verdnik on 2/26/15. +// Copyright (c) 2015 Layer. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +#import +#import +#import + +/** + @abstract Reads the stream content into a NSData object. + @param inputStream Input stream to read the content from. + @return Returns an `NSData` object containing the content of the stream; or `nil` in case of an error. + */ +NSData *ATLTestAttachmentDataFromStream(NSInputStream *inputStream); + +/** + @abstract Generates a test image with the given size. + @param size The size of the output image. + @return An `UIImage` instance. + */ +UIImage *ATLTestAttachmentMakeImageWithSize(CGSize size); + +/** + @abstract Generates a test image with the given size. + @param size The size of the output image. + @param animationSequenceFrame The sequence frame of the animation. + @return An `UIImage` instance. + */ +UIImage *ATLTestAttachmentMakeImageWithSizeAndAnimationSequenceFrame(CGSize imageSize, NSUInteger animationSequenceFrame); + +/** + @abstract Synhchronously grabs the last photo from the Photos Library. + @param library The library to grab the last photo from. + @return Returns ALAsset instance of the last image located in the Photos Library, or `nil` in case of a failure. 
+ */ +ALAsset *ATLAssetTestObtainLastImageFromAssetLibrary(ALAssetsLibrary *library); + +ALAsset *ATLVideoAssetTestObtainLastVideoFromAssetLibrary(ALAssetsLibrary *library); + +/** + @abstract Renders a test video with a given size, framerate and duration. + @param outputFileURL The output path URL where the video file will be located. + @param videoSize The desired size of the video (must conform to h264 size restrictions). + @param framesPerSecond Video framerate in frames per second. + @param duration The length of the video in seconds. + @return Returns `YES` if the file was successfully written to disk; otherwise `NO`. + */ +BOOL ATLTestMakeVideo(NSURL *outputFileURL, CGSize videoSize, NSUInteger framesPerSecond, NSTimeInterval duration); diff --git a/Tests/ATLTestUtilities.m b/Tests/ATLTestUtilities.m new file mode 100644 index 000000000..cf1f8c989 --- /dev/null +++ b/Tests/ATLTestUtilities.m @@ -0,0 +1,276 @@ +// +// ATLTestUtilities.m +// Atlas +// +// Created by Klemen Verdnik on 2/26/15. +// Copyright (c) 2015 Layer. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +#import "ATLTestUtilities.h" +#import +#import + +NSData *ATLTestAttachmentDataFromStream(NSInputStream *inputStream) +{ + if (!inputStream) { + @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"inputStream cannot be `nil`." 
userInfo:nil]; + } + NSMutableData *dataFromStream = [NSMutableData data]; + + // Open stream + [inputStream open]; + if (inputStream.streamError) { + NSLog(@"Failed to stream image content with %@", inputStream.streamError); + return nil; + } + + // Start streaming + const NSUInteger bufferSize = 1024; + uint8_t *buffer = malloc(bufferSize); + NSUInteger bytesRead; + do { + bytesRead = [inputStream read:buffer maxLength:(unsigned long)bufferSize]; + if (bytesRead != 0) { + [dataFromStream appendBytes:buffer length:bytesRead]; + } + } while (bytesRead != 0); + free(buffer); + + // Close stream + [inputStream close]; + + // Done + return dataFromStream; +} + +UIImage *ATLTestAttachmentMakeImageWithSizeAndAnimationSequenceFrame(CGSize imageSize, NSUInteger animationSequenceFrame) +{ + CGFloat scaleFactor; + CGFloat xOffset = 0.0f; + CGFloat yOffset = 15.0f; + if (imageSize.width >= imageSize.height) { + scaleFactor = imageSize.height / 350; + xOffset = (imageSize.width / 2) - (580 / 2 * scaleFactor); + yOffset += (imageSize.height / 2) / scaleFactor; + } else { + scaleFactor = imageSize.width / 580; + yOffset *= scaleFactor; + yOffset += (imageSize.height / 2) - (350 / 2 * scaleFactor); + } + + UIGraphicsBeginImageContext(imageSize); + CGContextRef context = UIGraphicsGetCurrentContext(); + CGContextSetRGBFillColor(context, 255.0f, 255.0f, 255.0f, 1.0f); + CGContextFillRect(context, CGRectMake(0, 0, imageSize.width, imageSize.height)); + CGContextTranslateCTM(context, xOffset, yOffset); + CGContextScaleCTM(context, scaleFactor, scaleFactor); + + UIBezierPath *path = [UIBezierPath bezierPath]; + [path setMiterLimit:4]; + float hue = (float)((animationSequenceFrame)%100)/100; + if (animationSequenceFrame == 0) { + [[UIColor blackColor] setFill]; + } else { + [[UIColor colorWithHue:hue saturation:1.0f brightness:1.0f alpha:1.0f] setFill]; + } + [path moveToPoint:CGPointMake(152.64, 175.83)]; + [path addCurveToPoint:CGPointMake(143.64, 184.82) 
controlPoint1:CGPointMake(152.64, 180.7) controlPoint2:CGPointMake(148.52, 184.82)]; + [path addLineToPoint:CGPointMake(120.88, 184.82)]; + [path addCurveToPoint:CGPointMake(41.18, 105.13) controlPoint1:CGPointMake(72.94, 184.82) controlPoint2:CGPointMake(41.18, 153.06)]; + [path addLineToPoint:CGPointMake(41.18, 82.36)]; + [path addCurveToPoint:CGPointMake(50.17, 73.37) controlPoint1:CGPointMake(41.18, 77.48) controlPoint2:CGPointMake(45.3, 73.37)]; + [path addLineToPoint:CGPointMake(143.64, 73.37)]; + [path addCurveToPoint:CGPointMake(152.64, 82.36) controlPoint1:CGPointMake(148.52, 73.37) controlPoint2:CGPointMake(152.64, 77.48)]; + [path addLineToPoint:CGPointMake(152.64, 175.83)]; + [path closePath]; + [path moveToPoint:CGPointMake(143.64, 57.19)]; + [path addLineToPoint:CGPointMake(50.17, 57.19)]; + [path addCurveToPoint:CGPointMake(25, 82.36) controlPoint1:CGPointMake(36.32, 57.19) controlPoint2:CGPointMake(25, 68.51)]; + [path addLineToPoint:CGPointMake(25, 175.83)]; + [path addCurveToPoint:CGPointMake(50.17, 201) controlPoint1:CGPointMake(25, 189.67) controlPoint2:CGPointMake(36.32, 201)]; + [path addLineToPoint:CGPointMake(143.64, 201)]; + [path addCurveToPoint:CGPointMake(168.81, 175.83) controlPoint1:CGPointMake(157.49, 201) controlPoint2:CGPointMake(168.81, 189.67)]; + [path addLineToPoint:CGPointMake(168.81, 82.36)]; + [path addCurveToPoint:CGPointMake(143.64, 57.19) controlPoint1:CGPointMake(168.81, 68.51) controlPoint2:CGPointMake(157.49, 57.19)]; + [path closePath]; + [path fill]; + + NSString* text = [NSString stringWithFormat:@"%c%c%c%c%c", 76, 97, 121, 101, 114]; + UIFont* font = [UIFont systemFontOfSize: 155]; + CGRect frame = CGRectMake(178, 36.5, 504, 190); + const NSUInteger fps = 30; + for (NSUInteger n=0; n +#import +#import "ATLTestInterface.h" +#import "ATLSampleConversationViewController.h" + +@interface ATLUserMockTest : XCTestCase + +@end + +@implementation ATLUserMockTest + +- (void)testToVerifyCaseInsensitiveSearch +{ + NSSet *set1 
= [ATLUserMock participantsWithText:@"Kleme"]; + NSString *fullName1 = ((ATLUserMock*)set1.allObjects.firstObject).fullName; + expect(fullName1).to.equal([ATLUserMock userWithMockUserName:ATLMockUserNameKlemen].fullName); + + NSSet *set2 = [ATLUserMock participantsWithText:@"kleme"]; + NSString *fullName2 = ((ATLUserMock*)set2.allObjects.firstObject).fullName; + expect(fullName2).to.equal([ATLUserMock userWithMockUserName:ATLMockUserNameKlemen].fullName); + + NSSet *set3 = [ATLUserMock participantsWithText:@"bob"]; + NSString *fullName3 = ((ATLUserMock*)set3.allObjects.firstObject).fullName; + expect(fullName3).toNot.equal([ATLUserMock userWithMockUserName:ATLMockUserNameKlemen].fullName); +} + +@end diff --git a/Tests/LYRClientMockTests.m b/Tests/LYRClientMockTests.m index 5f666ad11..69e0d4094 100644 --- a/Tests/LYRClientMockTests.m +++ b/Tests/LYRClientMockTests.m @@ -64,65 +64,12 @@ - (void)testAddMessages LYRMessageMock *message2 = [client newMessageWithParts:@[messagePart2] options:nil error:nil]; [conversation sendMessage:message2 error:nil]; - LYRQuery *query = [LYRQuery queryWithClass:[LYRMessage class]]; - query.predicate = [LYRPredicate predicateWithProperty:@"conversation" operator:LYRPredicateOperatorIsEqualTo value:conversation]; + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRMessage class]]; + query.predicate = [LYRPredicate predicateWithProperty:@"conversation" predicateOperator:LYRPredicateOperatorIsEqualTo value:conversation]; NSOrderedSet *messages = [client executeQuery:query error:nil]; expect(messages.count).to.equal(2); } -- (void)testMessagesIndexInConversationPreserved -{ - ATLUserMock *mockUser = [ATLUserMock userWithMockUserName:ATLMockUserNameBlake]; - LYRClientMock *client = [LYRClientMock layerClientMockWithAuthenticatedUserID:mockUser.participantIdentifier]; - - NSSet *participants = [NSSet setWithObject:[[ATLUserMock randomUser] participantIdentifier]]; - LYRConversationMock *conversation = [client 
newConversationWithParticipants:participants options:nil error:nil]; - - LYRMessagePartMock *messagePart1 = [LYRMessagePartMock messagePartWithText:@"How are you?"]; - LYRMessageMock *message1 = [client newMessageWithParts:@[messagePart1] options:nil error:nil]; - [conversation sendMessage:message1 error:nil]; - - LYRMessagePartMock *messagePart2 = [LYRMessagePartMock messagePartWithText:@"I am well"]; - LYRMessageMock *message2 = [client newMessageWithParts:@[messagePart2] options:nil error:nil]; - [conversation sendMessage:message2 error:nil]; - - LYRQuery *query = [LYRQuery queryWithClass:[LYRMessage class]]; - query.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"index" ascending:YES]]; - query.predicate = [LYRPredicate predicateWithProperty:@"conversation" operator:LYRPredicateOperatorIsEqualTo value:conversation]; - NSOrderedSet *messages = [client executeQuery:query error:nil]; - expect(messages.count).to.equal(2); - expect([messages[0] index]).to.equal(0); - expect([messages[1] index]).to.equal(1); - expect(conversation.lastMessage).to.equal(message2); -} - -- (void)testDeletingMessagesReindexesIndexOrder -{ - ATLUserMock *mockUser = [ATLUserMock userWithMockUserName:ATLMockUserNameBlake]; - LYRClientMock *client = [LYRClientMock layerClientMockWithAuthenticatedUserID:mockUser.participantIdentifier]; - - NSSet *participants = [NSSet setWithObject:[[ATLUserMock randomUser] participantIdentifier]]; - LYRConversationMock *conversation = [client newConversationWithParticipants:participants options:nil error:nil]; - - LYRMessagePartMock *messagePart1 = [LYRMessagePartMock messagePartWithText:@"How are you?"]; - LYRMessageMock *message1 = [client newMessageWithParts:@[messagePart1] options:nil error:nil]; - [conversation sendMessage:message1 error:nil]; - - LYRMessagePartMock *messagePart2 = [LYRMessagePartMock messagePartWithText:@"I am well"]; - LYRMessageMock *message2 = [client newMessageWithParts:@[messagePart2] options:nil error:nil]; - 
[conversation sendMessage:message2 error:nil]; - - [[LYRMockContentStore sharedStore] deleteMessage:message1]; - - LYRQuery *query = [LYRQuery queryWithClass:[LYRMessage class]]; - query.predicate = [LYRPredicate predicateWithProperty:@"conversation" operator:LYRPredicateOperatorIsEqualTo value:conversation]; - NSOrderedSet *messages = [client executeQuery:query error:nil]; - expect(messages.count).to.equal(1); - expect([messages[0] index]).to.equal(0); - expect(messages[0]).to.equal(message2); - expect(conversation.lastMessage).to.equal(message2); -} - - (void)testFetchingConversationByIdentifier { ATLUserMock *mockUser = [ATLUserMock userWithMockUserName:ATLMockUserNameBlake]; @@ -141,12 +88,12 @@ - (void)testFetchingConversationByIdentifier LYRMessageMock *message2 = [client newMessageWithParts:@[messagePart2] options:nil error:nil]; [conversation2 sendMessage:message2 error:nil]; - LYRQuery *query = [LYRQuery queryWithClass:[LYRConversation class]]; - query.predicate = [LYRPredicate predicateWithProperty:@"identifier" operator:LYRPredicateOperatorIsEqualTo value:conversation1.identifier]; + LYRQuery *query = [LYRQuery queryWithQueryableClass:[LYRConversation class]]; + query.predicate = [LYRPredicate predicateWithProperty:@"identifier" predicateOperator:LYRPredicateOperatorIsEqualTo value:conversation1.identifier]; LYRConversationMock *fetchedConversation = [[client executeQuery:query error:nil] lastObject]; expect(conversation1).to.equal(fetchedConversation); - query.predicate = [LYRPredicate predicateWithProperty:@"identifier" operator:LYRPredicateOperatorIsEqualTo value:conversation2.identifier]; + query.predicate = [LYRPredicate predicateWithProperty:@"identifier" predicateOperator:LYRPredicateOperatorIsEqualTo value:conversation2.identifier]; fetchedConversation = [[client executeQuery:query error:nil] lastObject]; expect(conversation2).to.equal(fetchedConversation); } diff --git a/Tests/Resources/boatgif.gif b/Tests/Resources/boatgif.gif new file mode 
100644 index 000000000..541d0f0cd Binary files /dev/null and b/Tests/Resources/boatgif.gif differ diff --git a/Tests/Schemes/Unit Tests.xcscheme b/Tests/Schemes/Unit Tests.xcscheme index cf0aa40b9..5a7f6c5d6 100644 --- a/Tests/Schemes/Unit Tests.xcscheme +++ b/Tests/Schemes/Unit Tests.xcscheme @@ -32,10 +32,10 @@ skipped = "NO"> + BlueprintIdentifier = "D0294DD71A93F33900702856" + BuildableName = "UnitTests.xctest" + BlueprintName = "UnitTests" + ReferencedContainer = "container:Atlas.xcodeproj"> @@ -76,15 +76,6 @@ useCustomWorkingDirectory = "NO" buildConfiguration = "Release" debugDocumentVersioning = "YES"> - - - -