# Convert a JPEG to PNG; -hide_banner / -loglevel panic suppress all console output.
ffmpeg -hide_banner -loglevel panic -i test.jpg test.png
# AVCHD (.MTS) -> MP4: yadif=1 deinterlaces (one frame per field), audio to AC-3
# at 192 kbps, video to MPEG-4; -y overwrites the output without asking and
# -qscale 0 requests the encoder's highest quality.
ffmpeg -i in.MTS -vf yadif=1 -acodec ac3 -ab 192k -vcodec mpeg4 -f mp4 -y -qscale 0 out.mp4
# Numbered image sequence (pic.0001.png ...) -> H.264 MP4 at 25 fps, 1920x1080;
# -crf 25 sets the quality target, yuv420p keeps the file playable in most players.
ffmpeg -r 25 -f image2 -s 1920x1080 -i pic.%04d.png -vcodec libx264 -crf 25 -pix_fmt yuv420p test.mp4
import os,glob,shlex,subprocess imgSeqPath = 'D:/ExamplePathRoot/ExampleName/ExampleVersion/example.%04d.exr' # 0001 format image names movPath = 'D:/OutputMov/ExampleName/example.mov' file_list = glob.glob( os.path.join(os.path.dirname(imgSeqPath), re.sub('\.?%0\d+d$', '*', base) + ext )) if len(file_list) == 0: return prefix, ext = os.path.splitext(os.path.basename(file_list[0])) start_number = re.findall('\d+$', prefix)[0] enc = 'D:/toolPath/ffmpeg.exe -y -r 24 -start_number {0} -i "{1}" -an -vcodec libx264 -preset slow -crf 22 -threads 0 "{2}"'.format(start_number, imgSeqPath, movPath) subprocess.Popen(shlex.split(enc), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# ProRes variant of the image-sequence encode. In the original snippet the
# code was fused onto a single '#' line (so Python treated it all as a
# comment) and it referenced free variables; parameterized here instead.
# ref: https://trac.ffmpeg.org/wiki/Encode/VFX
def build_prores_cmd(start_number, img_seq_path, mov_path):
    """Return the ffmpeg command line for a ProRes (prores_ks) encode.

    Notes carried over from the original snippet:
      -profile:v : 0 proxy, 1 LT, 2 SQ, 3 HQ
      -pix_fmt   : yuv422p10le; for 444 use yuva444p10le
      -qscale:v  : 0 best .. 32 worst (e.g. -qscale:v 11), if used instead
    """
    return ('D:/toolPath/ffmpeg.exe -y -r 24 -start_number %s -i "%s" -an '
            '-c:v prores_ks -profile:v 2 -pix_fmt yuv422p10le -vendor ap10 "%s"'
            % (start_number, img_seq_path, mov_path))
# Scale an image to 320 px wide; height -1 preserves the aspect ratio.
ffmpeg -i input.jpg -vf scale=320:-1 output_320.png
# MOV with alpha -> VP9 WebM; yuva420p carries the alpha channel, 2 Mbps video bitrate.
ffmpeg.exe -i transparent_video_audio.mov -c:v libvpx-vp9 -pix_fmt yuva420p -b:v 2000k out.webm
# MPEG -> H.264 MOV; -crf 20 quality target, veryslow preset for best compression.
# NOTE(review): libfaac support was removed from modern ffmpeg builds — this
# likely needs `-c:a aac` on a current build; confirm against the bundled ffmpeg.
ffmpeg.exe -i "d:\out\me.mpg" -c:v libx264 -c:a libfaac -crf 20 -preset:v veryslow "d:\out\me_out.mov"
# Trim a video without re-encoding (-c copy). Placement of -ss matters and can
# make the cut work or not work:
#   -ss after -i  : output-side seek on the source timeline (9s..21s here).
#   -ss before -i : input-side seek; fast, but with -c copy it may snap to a
#                   keyframe, and -to then counts on the shifted timeline.
#                   TODO confirm exact -to semantics for the ffmpeg version in use.
# ref: https://ottverse.com/trim-cut-video-using-start-endtime-reencoding-ffmpeg/
# (The original kept both commands fused onto one '#' line, so neither command
# would ever run when pasted into a shell — split onto their own lines.)
ffmpeg.exe -i video.mp4 -c copy -ss 00:00:09 -to 00:00:21 video_cut.mp4
ffmpeg.exe -ss 00:00:09 -i video.mp4 -c copy -to 00:00:21 video_cut.mp4
# High-quality GIF: split the stream, generate an optimal palette on one branch
# (palettegen) and apply it on the other (paletteuse); 10 fps, 640 px wide with
# lanczos scaling, -loop 0 loops forever.
ffmpeg -i input.mp4 -vf "fps=10,scale=640:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse" -loop 0 output.gif
# 2x2 grid of four videos: hstack joins inputs 0+1 into [top] and 2+3 into
# [bottom], then vstack stacks the two rows; -map "[v]" selects the composite.
# ref: https://ottverse.com/stack-videos-horizontally-vertically-grid-with-ffmpeg/
# (Original fused the command onto the '#ref:' comment line, so the command was
# dead in a shell — moved onto its own line.)
ffmpeg -i input0.mp4 -i input1.mp4 -i input2.mp4 -i input3.mp4 -filter_complex "[0:v][1:v]hstack=inputs=2[top]; [2:v][3:v]hstack=inputs=2[bottom]; [top][bottom]vstack=inputs=2[v]" -map "[v]" finalOutput.mp4
# Pad an image to a 1024x1024 square: shrink to fit (aspect preserved), then
# center on a white canvas; -q:v 1 keeps JPEG quality near-lossless.
ffmpeg -i image.jpg -vf "scale=1024:1024:force_original_aspect_ratio=decrease,pad=1024:1024:-1:-1:color=white" -q:v 1 tex_image.jpg
# Same idea sized to the image's own longest side, i.e. keep the original
# resolution and only pad the short side to make it square.
# (In the original both commands sat on one line with the second swallowed by
# the inline '#' comment — split onto separate lines.)
ffmpeg -i image.jpg -vf "scale=max(iw\,ih):max(iw\,ih):force_original_aspect_ratio=decrease,pad=max(iw\,ih):max(iw\,ih):-1:-1:color=white" -q:v 1 tex_image.jpg
# Lighten a large GIF: keep only every 10th frame (select=not(mod(n-1,10))) and
# scale to 600 px wide; -vsync vfr drops the timestamps of discarded frames so
# the output isn't padded with duplicates.
ffmpeg.exe -i big_recording.gif -vf "select=not(mod(n-1\,10)),scale=600:-1" -vsync vfr light_10th_600w.gif