由于只是简单的描述如何获取系统相册内的照片和视频,这里就不贴我那很丑的布局的Demo了
步骤
1.plist文件添加访问权限
2.导入需要支持的库 import Photos
import Photos
3.获取所有内容。官方API里面有很多可以获取到 PHFetchResult 的方法,可根据需求自行选用
/// All assets in the user's photo library, fetched once on first access
/// and sorted by creation date in ascending order (oldest first).
///
/// The sort key `"creationDate"` must match a fetchable `PHAsset` property;
/// pass `ascending: false` for newest-first ordering instead.
/// Other fetch entry points exist on `PHAsset` (by collection, by local
/// identifier, by media type, by burst identifier, ...) — see the
/// `PHAsset.fetchAssets(...)` family in the Photos framework.
lazy var photosArray : PHFetchResult = {
    let fetchOptions = PHFetchOptions()
    let byCreationDate = NSSortDescriptor(key: "creationDate", ascending: true)
    fetchOptions.sortDescriptors = [byCreationDate]
    return PHAsset.fetchAssets(with: fetchOptions)
}()
效果图:这里使用的是两个UICollectionView .
4.获取某一个相册内的个体: let asset: PHAsset? = photosArray[indexPath.row]
5.以下贴官方API的关于PHAsset部分属性的代码
// Playback style describes how the asset should be presented to the user (regardless of the backing media for that asset). Use this value to choose the type of view and the appropriate APIs on the PHImageManager to display this asset
@available(iOS 11.0, *)
open var playbackStyle: PHAsset.PlaybackStyle { get }
/// mediaType 属性用于判断资源的类型:图片、视频、音频或未知(对应 PHAssetMediaType 枚举)
/*
public enum PHAssetMediaType : Int {
case unknown
case image
case video
case audio
}
*/
open var mediaType: PHAssetMediaType { get }
open var mediaSubtypes: PHAssetMediaSubtype { get }
open var pixelWidth: Int { get }
open var pixelHeight: Int { get }
open var creationDate: Date? { get }
open var modificationDate: Date? { get }
open var location: CLLocation? { get }
open var duration: TimeInterval { get }
// a hidden asset will be excluded from moment collections, but may still be included in other smart or regular album collections
open var isHidden: Bool { get }
open var isFavorite: Bool { get }
open var burstIdentifier: String? { get }
open var burstSelectionTypes: PHAssetBurstSelectionType { get }
open var representsBurst: Bool { get }
6.判断asset.mediaType的属性.
7.根据mediaType来根据类方法获取想要的内容,类方法有很多,自行根据需求获取
如下:
// Dispatch on the asset's media type and load a thumbnail into `imageView`.
// Fixes vs. original: `asset` is unwrapped once instead of force-unwrapped
// (`asset!`) at every use; `copyCGImage` is wrapped in do/catch instead of
// `try!` (it throws for protected/unreadable media); the `actualTime`
// placeholder uses `CMTime.zero` instead of the invalid
// `CMTimeMake(value: 0, timescale: 0)` (a zero timescale is not a valid time).
if let asset = asset {
    switch asset.mediaType {
    case .image:
        // Request the image at its full pixel size; the result handler for an
        // asynchronous requestImage is delivered on the main thread by default.
        let targetSize = CGSize(width: asset.pixelWidth, height: asset.pixelHeight)
        PHCachingImageManager.default().requestImage(for: asset,
                                                     targetSize: targetSize,
                                                     contentMode: .aspectFill,
                                                     options: nil) { image, _ in
            self.imageView.image = image
        }
    case .video:
        let options = PHVideoRequestOptions()
        options.version = .current
        options.deliveryMode = .automatic
        // Allow fetching from iCloud when the video is not on-device.
        options.isNetworkAccessAllowed = true
        PHCachingImageManager.default().requestAVAsset(forVideo: asset, options: options) { avAsset, _, _ in
            guard let avAsset = avAsset else { return }
            let generator = AVAssetImageGenerator(asset: avAsset)
            // Respect the video track's rotation/orientation metadata.
            generator.appliesPreferredTrackTransform = true
            // Grab a frame at t = 0 (timescale 600 is the conventional media timescale).
            let time = CMTimeMakeWithSeconds(0.0, preferredTimescale: 600)
            var actualTime = CMTime.zero // out-parameter; overwritten by copyCGImage
            do {
                let frame = try generator.copyCGImage(at: time, actualTime: &actualTime)
                // Break the duration into h/m/s components for a duration label.
                let totalSeconds = NSInteger(round(CMTimeGetSeconds(avAsset.duration)))
                let seconds = totalSeconds % 60
                let minutes = totalSeconds % 3600 / 60
                let hours = totalSeconds / 3600
                // requestAVAsset calls back on an arbitrary queue — hop to main for UI.
                DispatchQueue.main.async {
                    self.imageView.image = UIImage(cgImage: frame)
                    // TODO(review): surface hours/minutes/seconds in the UI; the
                    // original computed them but never displayed them.
                    _ = (hours, minutes, seconds)
                }
            } catch {
                // Thumbnail generation failed (e.g. DRM-protected or corrupt media);
                // log instead of crashing via try!.
                print("Failed to generate video thumbnail: \(error)")
            }
        }
    default:
        // .audio / .unknown: no thumbnail handling in this demo.
        break
    }
}