如何解决使用适用于 .NET 的 AWS 开发工具包的 AWS Elemental MediaConvert CreateJob 示例
我正在尝试更改文件输入的输入剪辑起始时间码和结束时间码，并将剪辑后的视频保存到 S3 存储桶中的文件输出目标。
目前,我可以使用以下代码执行操作:
using System;
using System.Threading.Tasks;
using Amazon.MediaConvert;
using Amazon.MediaConvert.Model;
namespace MediaConvertNET
{
    /// <summary>
    /// Sample console program that submits an AWS Elemental MediaConvert job.
    /// The job clips a section of an S3 input file (via InputClipping start/end
    /// timecodes) and writes an H.264/AAC MP4 to an S3 output destination.
    /// </summary>
    class Program
    {
        static async Task MainAsync()
        {
            String mediaConvertRole = "Your AWS Elemental MediaConvert role ARN";
            String fileInput = "s3://yourinputfile";
            String fileOutput = "s3://youroutputdestination";
            String mediaConvertEndpoint = "";

            // If we do not have our customer-specific endpoint yet, ask the
            // service for it: MediaConvert requires a per-account endpoint URL.
            if (String.IsNullOrEmpty(mediaConvertEndpoint))
            {
                // NOTE(review): hard-coded credentials are shown for the sample only —
                // prefer the default credential chain (shared profile / IAM role).
                AmazonMediaConvertClient client = new AmazonMediaConvertClient("AccessKey", "AccessSecret", Amazon.RegionEndpoint.USWest1);
                DescribeEndpointsRequest describeRequest = new DescribeEndpointsRequest();
                DescribeEndpointsResponse describeResponse = await client.DescribeEndpointsAsync(describeRequest);
                mediaConvertEndpoint = describeResponse.Endpoints[0].Url;
            }

            // Since we have a service URL for MediaConvert, we do not need to
            // set RegionEndpoint. If we did, the ServiceURL would be overwritten.
            AmazonMediaConvertConfig mcConfig = new AmazonMediaConvertConfig
            {
                ServiceURL = mediaConvertEndpoint,
            };
            // BUG FIX: the SDK has no (accessKey, config) constructor — the secret
            // key must be supplied alongside the access key.
            AmazonMediaConvertClient mcClient = new AmazonMediaConvertClient("AccessKey", "AccessSecret", mcConfig);

            CreateJobRequest createJobRequest = new CreateJobRequest();
            createJobRequest.Role = mediaConvertRole;
            createJobRequest.UserMetadata.Add("Customer", "Amazon");

            #region Create job settings
            JobSettings jobSettings = new JobSettings();
            jobSettings.AdAvailOffset = 0;
            jobSettings.TimecodeConfig = new TimecodeConfig();
            jobSettings.TimecodeConfig.Source = TimecodeSource.EMBEDDED;
            createJobRequest.Settings = jobSettings;

            #region OutputGroup
            OutputGroup ofg = new OutputGroup();
            ofg.Name = "File Group";
            ofg.OutputGroupSettings = new OutputGroupSettings();
            ofg.OutputGroupSettings.Type = OutputGroupType.FILE_GROUP_SETTINGS;
            ofg.OutputGroupSettings.FileGroupSettings = new FileGroupSettings();
            ofg.OutputGroupSettings.FileGroupSettings.Destination = fileOutput;

            Output output = new Output();
            output.NameModifier = "_1";

            #region VideoDescription
            VideoDescription vdes = new VideoDescription();
            output.VideoDescription = vdes;
            vdes.ScalingBehavior = ScalingBehavior.DEFAULT;
            vdes.TimecodeInsertion = VideoTimecodeInsertion.DISABLED;
            vdes.AntiAlias = AntiAlias.ENABLED;
            vdes.Sharpness = 50;
            vdes.AfdSignaling = AfdSignaling.NONE;
            vdes.DropFrameTimecode = DropFrameTimecode.ENABLED;
            vdes.RespondToAfd = RespondToAfd.NONE;
            vdes.ColorMetadata = ColorMetadata.INSERT;
            vdes.CodecSettings = new VideoCodecSettings();
            vdes.CodecSettings.Codec = VideoCodec.H_264;

            // H.264 settings: 2 Mbps CBR, 30 fps progressive, main profile.
            H264Settings h264 = new H264Settings();
            h264.InterlaceMode = H264InterlaceMode.PROGRESSIVE;
            h264.NumberReferenceFrames = 3;
            h264.Syntax = H264Syntax.DEFAULT;
            h264.Softness = 0;
            h264.GopClosedCadence = 1;
            h264.GopSize = 90;
            h264.Slices = 1;
            h264.GopBReference = H264GopBReference.DISABLED;
            h264.SlowPal = H264SlowPal.DISABLED;
            h264.SpatialAdaptiveQuantization = H264SpatialAdaptiveQuantization.ENABLED;
            h264.TemporalAdaptiveQuantization = H264TemporalAdaptiveQuantization.ENABLED;
            h264.FlickerAdaptiveQuantization = H264FlickerAdaptiveQuantization.DISABLED;
            h264.EntropyEncoding = H264EntropyEncoding.CABAC;
            h264.Bitrate = 2000000;
            h264.FramerateControl = H264FramerateControl.SPECIFIED;
            h264.RateControlMode = H264RateControlMode.CBR;
            h264.CodecProfile = H264CodecProfile.MAIN;
            h264.Telecine = H264Telecine.NONE;
            h264.MinIInterval = 0;
            h264.AdaptiveQuantization = H264AdaptiveQuantization.HIGH;
            h264.CodecLevel = H264CodecLevel.AUTO;
            h264.FieldEncoding = H264FieldEncoding.PAFF;
            h264.SceneChangeDetect = H264SceneChangeDetect.ENABLED;
            h264.QualityTuningLevel = H264QualityTuningLevel.SINGLE_PASS;
            h264.FramerateConversionAlgorithm = H264FramerateConversionAlgorithm.DUPLICATE_DROP;
            h264.UnregisteredSeiTimecode = H264UnregisteredSeiTimecode.DISABLED;
            h264.GopSizeUnits = H264GopSizeUnits.FRAMES;
            h264.ParControl = H264ParControl.SPECIFIED;
            h264.NumberBFramesBetweenReferenceFrames = 2;
            h264.RepeatPps = H264RepeatPps.DISABLED;
            h264.FramerateNumerator = 30;
            h264.FramerateDenominator = 1;
            h264.ParNumerator = 1;
            h264.ParDenominator = 1;
            output.VideoDescription.CodecSettings.H264Settings = h264;
            #endregion VideoDescription

            #region AudioDescription
            AudioDescription ades = new AudioDescription();
            ades.LanguageCodeControl = AudioLanguageCodeControl.FOLLOW_INPUT;
            // This name matches the selector specified in the Input below.
            ades.AudioSourceName = "Audio Selector 1";
            ades.CodecSettings = new AudioCodecSettings();
            ades.CodecSettings.Codec = AudioCodec.AAC;
            AacSettings aac = new AacSettings();
            aac.AudioDescriptionBroadcasterMix = AacAudioDescriptionBroadcasterMix.NORMAL;
            aac.RateControlMode = AacRateControlMode.CBR;
            aac.CodecProfile = AacCodecProfile.LC;
            aac.CodingMode = AacCodingMode.CODING_MODE_2_0;
            aac.RawFormat = AacRawFormat.NONE;
            aac.SampleRate = 48000;
            aac.Specification = AacSpecification.MPEG4;
            aac.Bitrate = 64000;
            ades.CodecSettings.AacSettings = aac;
            output.AudioDescriptions.Add(ades);
            #endregion AudioDescription

            #region Mp4 Container
            output.ContainerSettings = new ContainerSettings();
            output.ContainerSettings.Container = ContainerType.MP4;
            Mp4Settings mp4 = new Mp4Settings();
            mp4.CslgAtom = Mp4CslgAtom.INCLUDE;
            mp4.FreeSpaceBox = Mp4FreeSpaceBox.EXCLUDE;
            mp4.MoovPlacement = Mp4MoovPlacement.PROGRESSIVE_DOWNLOAD;
            output.ContainerSettings.Mp4Settings = mp4;
            #endregion Mp4 Container

            ofg.Outputs.Add(output);
            createJobRequest.Settings.OutputGroups.Add(ofg);
            #endregion OutputGroup

            #region Input
            Input input = new Input();
            // Clip the first five seconds of the input. ZEROBASED means the
            // clip timecodes count from 00:00:00:00 at the start of the file.
            InputClipping ip = new InputClipping();
            ip.StartTimecode = "00:00:00:00";
            ip.EndTimecode = "00:00:05:00";
            input.FilterEnable = InputFilterEnable.AUTO;
            input.PsiControl = InputPsiControl.USE_PSI;
            input.FilterStrength = 0;
            input.DeblockFilter = InputDeblockFilter.DISABLED;
            input.DenoiseFilter = InputDenoiseFilter.DISABLED;
            input.TimecodeSource = InputTimecodeSource.ZEROBASED;
            input.InputClippings.Add(ip);
            input.FileInput = fileInput;

            AudioSelector audsel = new AudioSelector();
            audsel.Offset = 0;
            audsel.DefaultSelection = AudioDefaultSelection.NOT_DEFAULT;
            audsel.ProgramSelection = 1;
            audsel.SelectorType = AudioSelectorType.TRACK;
            audsel.Tracks.Add(1);
            input.AudioSelectors.Add("Audio Selector 1", audsel);

            input.VideoSelector = new VideoSelector();
            input.VideoSelector.ColorSpace = ColorSpace.FOLLOW;
            createJobRequest.Settings.Inputs.Add(input);
            #endregion Input
            #endregion Create job settings

            try
            {
                CreateJobResponse createJobResponse = await mcClient.CreateJobAsync(createJobRequest);
                Console.WriteLine("Job Id: {0}", createJobResponse.Job.Id);
            }
            catch (BadRequestException bre)
            {
                // If the endpoint was bad
                if (bre.Message.StartsWith("You must use the customer-", StringComparison.Ordinal))
                {
                    // The exception contains the correct endpoint; extract it
                    mediaConvertEndpoint = bre.Message.Split('\'')[1];
                    // Code to retry query
                }
            }
        }

        static void Main(string[] args)
        {
            // Block on the async entry point directly; wrapping in Task.Run
            // adds a pointless thread-pool hop in a console app.
            MainAsync().GetAwaiter().GetResult();
        }
    }
}
有几点我想知道：
1. 如果只进行裁剪操作（即 InputClipping ip = new InputClipping(); ip.StartTimecode = "00:00:00:00"; ip.EndTimecode = "00:00:05:00";），是否仍然必须像我这样创建 VideoDescription 对象和 AudioDescription 对象？
2. 调用 CreateJobResponse createJobResponse = await mcclient.CreateJobAsync(createJobRequest); 之后，如何检查我的转码作业是否已完成？
解决方法
对于问题 1: 根据您的工作流程,您的输出对象必须包含以下描述组合:
- VideoDescription 和 AudioDescription（视频 + 音频混流输出）
- 仅 VideoDescription（纯视频输出）
- 仅 AudioDescription（纯音频输出）
这将确保您的输出只有视频/视频和音频/音频。
MediaConvert 将在您定义的剪辑区域中对输入进行编码。该服务不会将视频或音频原样传递到输出（视频行业有时称之为"直通"，即 passthrough）。请将 MediaConvert 的输出视为一个全新的文件。
问题 2:
我建议使用 CloudWatch Events 来监控工作进展。请参阅以下文档:
https://docs.aws.amazon.com/mediaconvert/latest/ug/how-mediaconvert-jobs-progress.html
https://docs.aws.amazon.com/mediaconvert/latest/ug/cloudwatch_events.html
问题 3: 请参阅我在 How to retrieve list of encoded files and paths after a done job in MediaConvert?
中的帖子您可以通过收集 COMPLETE CloudWatch 事件来获取此信息。
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。