目前,这就是我在UIViewController的子视图中播放视频的方式:
// Plays the bundled "musicvideo.mp4" on loop, full-bleed, without controls.
// NOTE(review): to avoid interrupting Music/Spotify audio, the AVAudioSession
// category (e.g. Ambient + mixWithOthers) must also be configured — TODO confirm.
override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)  // always forward lifecycle calls to super

    // Resolve the bundled resource safely instead of passing an unchecked optional path.
    guard let filePath = NSBundle.mainBundle().pathForResource("musicvideo", ofType: "mp4") else {
        return  // resource missing from the bundle; nothing to play
    }
    self.moviePlayerController.contentURL = NSURL.fileURLWithPath(filePath)

    // Configure the player fully and install its view BEFORE starting playback;
    // the original called play() first, which can drop the initial settings.
    self.moviePlayerController.repeatMode = .One
    self.moviePlayerController.view.frame = self.view.bounds
    self.moviePlayerController.scalingMode = .AspectFill
    self.moviePlayerController.controlStyle = .None
    self.moviePlayerController.allowsAirPlay = false
    self.view.addSubview(self.moviePlayerController.view)
    self.moviePlayerController.play()
}
Run Code Online (Sandbox Code Playgroud)
我已经了解到可以通过以下方式禁用音频,但这些方法完全不起作用。请记住,我的目标是禁用本视频的音频,从而不打断"音乐"应用、Spotify 等正在播放的音乐。
// Playing media items with the applicationMusicPlayer will restore the user's Music state after the application quits.
// The current volume of playing music, in the range of 0.0 to 1.0.
// This property is deprecated -- use MPVolumeView for volume control instead.
Run Code Online (Sandbox Code Playgroud)
1) MPMusicPlayerController.applicationMusicPlayer().volume = …
当AVPlayer到达视频末尾时(对于HTTP直播流),最后一帧会拉伸以填充视图.我如何解决这种拉伸?例如,考虑视频是以纵向模式录制的.在较小的视图中,视频将被加框(带有黑色边框).这很好,直到玩家到达流的末尾.最后一帧然后伸展以填充视图,直到播放器重新启动.
关于如何阻止这种情况发生的任何想法?
编辑:请参阅下面的答案.如果有人对什么是体面的解决方案有任何建议,我仍然愿意为此奖励赏金.
我正在开发一个iOS应用程序,该应用程序可以创建用户池提交的所有视频的源,使用户能够浏览和查看其他人创建的视频.您可以想象,我需要能够在Feed中支持任意数量的视频.
目前,我正在为每个视频创建并保留一个AVPlayer实例,如下所示:
//inside the init method of a UIView
//create the AVPlayer and store it in a strong property
NSString *urlString = @"aRemoteURL.mov";
NSURL *movURL = [NSURL URLWithString:urlString];
_videoPlayer = [[AVPlayer alloc] initWithURL:movURL];

//register callbacks for handling buffering
//NOTE(review): balance these with removeObserver:forKeyPath: (e.g. in dealloc),
//otherwise KVO will crash when this view is deallocated.
[_videoPlayer.currentItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil];
[_videoPlayer.currentItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil];

//add the AVPlayerLayer to the view so you can see the video
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:_videoPlayer];
// Use bounds, not frame: a sublayer's frame is expressed in the host layer's
// own coordinate space, while self.frame is in the superview's space. Using
// frame misplaces the video whenever the view's origin is non-zero.
playerLayer.frame = self.bounds;
[self.layer addSublayer:playerLayer];
Run Code Online (Sandbox Code Playgroud)
当用户点击这个 UIView 时,我会对 _videoPlayer 调用 play,一切都很完美。也就是说,直到有足够多的视频被提交到 Feed ... …
我正在编写一个应用程序,需要根据用户的选择显示不同的视频.当用户选择视频时,将调用playVideo功能.视频播放完毕后,视频将再次隐藏.
我的代码如下:
var player: AVPlayer?
/// Unhides `videoView` and plays the bundled MP4 named `videoFile` from the start.
/// - Parameter videoFile: resource name (without extension) of an .mp4 in the main bundle.
/// Note: the original declared the parameter backwards (`String: videoFile`),
/// which does not compile; the intended signature is restored here.
func playVideo(videoFile: String) {
    self.videoView.isHidden = false

    // Resolve the bundled resource safely; the original force-unwrap would
    // crash on a missing or misspelled file name.
    guard let videoURL = Bundle.main.url(forResource: videoFile, withExtension: "mp4") else {
        self.videoView.isHidden = true
        return
    }
    self.player = AVPlayer(url: videoURL)

    // Remove any player layer left over from a previous call; the original
    // added a fresh AVPlayerLayer every time, so layers accumulated on
    // videoView and were never released (the memory concern in the question).
    self.videoView.layer.sublayers?
        .compactMap { $0 as? AVPlayerLayer }
        .forEach { $0.removeFromSuperlayer() }

    let playerLayer = AVPlayerLayer(player: self.player)
    playerLayer.frame = self.videoView.frame  // NOTE(review): .bounds is usually intended here — TODO confirm
    self.videoView.layer.addSublayer(playerLayer)

    // Rewind to the beginning of the clip before playing.
    self.player?.seek(to: CMTimeMake(0, 1))
    self.player?.play()

    // Observe end-of-playback so the view can be hidden again.
    NotificationCenter.default.addObserver(
        self,
        selector: #selector(self.playerItemDidReachEnd),
        name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
        object: self.player?.currentItem
    )
}
/// Called when the current item finishes playing: pause, hide the video view,
/// and detach the end-of-playback observer registered in playVideo.
@objc func playerItemDidReachEnd() {
    self.player?.pause()
    self.videoView.isHidden = true
    // Remove ONLY the observation added in playVideo. The original
    // removeObserver(self) also dropped every other notification this
    // object observes, which silently breaks unrelated observers.
    NotificationCenter.default.removeObserver(
        self,
        name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
        object: nil
    )
}
Run Code Online (Sandbox Code Playgroud)
但是,根据上面的代码,我有几个问题:
如何优雅地删除/取消分配播放器?如果只使用我当前的代码,它会占用大量内存吗?
每当用户按下按钮时,将调用playVideo功能,并创建并播放相应的播放器.这是正确的方法吗?有没有其他方法或更有效的方式或优雅的方式这样做? …
在Xcode中使用AVPlayer可以帮助我设置和播放本地视频文件的代码吗?(使用AVPlayerLayer和AVPlayerViewController)所有以编程方式完成并使用标准/系统视频?
此致,Henning
我正在使用 Swift 编写一个应用程序,以通过 HLS 查看监控摄像头。我有基本的设备列表工作,我能够转至实时视图并显示流,但是,我需要移动 AVPlayerLayer,但我无法弄清楚这一点。这是我当前的代码:
// Height reserved for the title shown at the top of every view scene.
let titleBarHeight: CGFloat = 50

let player = AVPlayer(URL: url!)  // NOTE(review): prefer guard-let over force unwrap
let playerLayer = AVPlayerLayer(player: player)

self.view.layer.borderWidth = 1
self.view.layer.borderColor = UIColor(red: 222/255.0, green: 225/255.0, blue: 227/255.0, alpha: 1.0).CGColor

// Position the video layer 50 pt below the top so it clears the title.
// The original sized the layer from a throwaway UIView that was created but
// never added to the hierarchy, so the layer always sat at the origin.
playerLayer.frame = CGRectMake(0, titleBarHeight, screenSize.width, screenSize.height - titleBarHeight)
self.view.layer.addSublayer(playerLayer)

player.play()
Run Code Online (Sandbox Code Playgroud)
我希望将 AVPlayerLayer 放置在顶部下方 50 点处,因为每个视图场景都有一个标题。
谢谢!
我正在开发一个录制视频的应用程序。录制结束后,我使用一个第三方库将 GIF 图像叠加在视频上面。
我的代码用于播放视频和将gif图像作为叠加层
// Plays the recorded clip in an AVPlayerLayer that fills preview_view, with
// an animated GIF shown on top in img_gif.
self.avPlayer = [AVPlayer playerWithURL:self.urlstring];
// Keep showing the last frame at the end instead of pausing/rewinding.
self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
AVPlayerLayer *videoLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
videoLayer.frame = self.preview_view.bounds;
// Fill the preview, cropping if the video's aspect ratio differs.
videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.preview_view.layer addSublayer:videoLayer];
// Load the bundled "02.gif" and display it as the overlay image.
// NOTE(review): animatedImageWithAnimatedGIFData: is a third-party UIImage
// category, not part of UIKit — confirm the dependency is linked.
NSURL *url = [[NSBundle mainBundle] URLForResource:@"02" withExtension:@"gif"];
self.img_gif.image = [UIImage animatedImageWithAnimatedGIFData:[NSData dataWithContentsOfURL:url]];
Run Code Online (Sandbox Code Playgroud)
但现在我想合并并保存带有此GIF图像叠加的视频.我谷歌它没找到我想要的东西.
谢谢您的帮助
我正在使用Swift 3在iOS应用程序上工作,我在后台播放视频.我的代码完全正常,直到随机出现错误信息:
出现的错误信息为:类型 `AVLayerVideoGravity`(即 `NSString`)没有成员 `resizeAspectFill`。我无法弄清楚为什么 resizeAspectFill 不再被识别。有什么我遗漏的吗?我已经尝试过清理我的项目,但这并没有解决任何问题。任何帮助将不胜感激 :) 下面是代码:
import Foundation
import AVFoundation
import UIKit
class HomeViewController: UIViewController {
// MARK: Properties
var avPlayer: AVPlayer!
var avPlayerLayer: AVPlayerLayer!
var paused: Bool = false
var enterButton: UIButton! = UIButton()
override func viewDidLoad() {
super.viewDidLoad()
self.title = "Welcome"
let theURL = Bundle.main.url(forResource:"hannah", withExtension: "mp4")
avPlayer = AVPlayer(url: theURL!)
avPlayerLayer = AVPlayerLayer(player: avPlayer)
avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
avPlayer.volume = 0
avPlayer.actionAtItemEnd = .none
avPlayerLayer.frame = view.layer.bounds
view.backgroundColor = .clear
view.layer.insertSublayer(avPlayerLayer, at: 0)
NotificationCenter.default.addObserver(self,
selector: …Run Code Online (Sandbox Code Playgroud) 让AVPlayer视频内容显示在一个视图中的诀窍是什么?
我们使用以下AVPlayer代码,但屏幕上没有显示任何内容.我们知道视频在那里,因为我们能够使用MPMoviePlayerController显示它.
这是我们使用的代码:
AVAsset *asset = [AVAsset assetWithURL:videoTempURL];
AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
AVPlayer *player = [[AVPlayer alloc] initWithPlayerItem:item];
// Keep the final frame on screen when playback ends.
player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:player];
// THE BUG: the frame assignment was commented out, so the layer kept its
// default zero-sized frame and nothing was ever rendered. Also use bounds
// (the host layer's own coordinate space) rather than self.view.frame.
layer.frame = self.view.bounds;
layer.backgroundColor = [UIColor clearColor].CGColor;
[layer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[self.view.layer addSublayer:layer];

[player play];
Run Code Online (Sandbox Code Playgroud)
我们是否为当前视图设置了不正确的图层?
我正在尝试在设备旋转后始终全屏显示 AVPlayer 视频。这是我的完整代码。我不明白为什么视频没有调整大小。即使我使用像 layerClass 这样的子类。设备旋转后,视频视图被剪切,我看不到完整的视频。如果有人有想法。提前致谢。
import UIKit
import AVKit
import AVFoundation
class ViewController: UIViewController {
var player: AVPlayer?
var videoView: VideoContainerView!
var playerLayer: AVPlayerLayer!
/// Builds a full-screen, muted, aspect-fill video backdrop from the bundled
/// "GAVIDEO.mp4" inside a VideoContainerView and starts playback.
override func viewDidLoad() {
    super.viewDidLoad()

    self.videoView = VideoContainerView()
    self.videoView.frame = self.view.bounds
    // Let the container track the root view through rotations; the player
    // layer itself still needs re-framing (see viewWillTransition).
    self.videoView.autoresizingMask = [.flexibleWidth, .flexibleHeight]

    // Resolve the bundled clip safely instead of force-unwrapping the path,
    // and build the URL directly rather than bouncing through NSURL casts.
    guard let path = Bundle.main.path(forResource: "GAVIDEO", ofType: "mp4") else {
        return  // missing resource: leave the screen without a video backdrop
    }
    self.player = AVPlayer(url: URL(fileURLWithPath: path))
    self.player?.isMuted = true

    self.playerLayer = AVPlayerLayer(player: self.player)
    self.playerLayer.frame = self.videoView.bounds
    self.playerLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    self.videoView.layer.addSublayer(playerLayer)
    self.videoView.layer.masksToBounds = true

    self.view.addSubview(self.videoView)
    self.player?.play()
}
override func viewWillTransition(to size: CGSize, with coordinator: …Run Code Online (Sandbox Code Playgroud) avplayerlayer ×10
ios ×8
avplayer ×7
swift ×4
objective-c ×3
avfoundation ×2
avplayeritem ×2
uiview ×2
swift3 ×1
video ×1